Compare commits
3 Commits
master
...
pyinstalle
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bd854e2f3c | ||
|
|
6ccca2f00c | ||
|
|
8f60d5ec90 |
2
.github/ISSUE_TEMPLATE.md
vendored
@ -1,2 +0,0 @@
|
||||
<!-- Note: This website is for bug reports, not general questions.
|
||||
Do not post issues about non-bitcoin versions of Electrum. -->
|
||||
15
.gitignore
vendored
@ -4,24 +4,19 @@
|
||||
build/
|
||||
dist/
|
||||
*.egg/
|
||||
/electrum.py
|
||||
contrib/pyinstaller/
|
||||
Electrum.egg-info/
|
||||
electrum/locale/
|
||||
gui/qt/icons_rc.py
|
||||
locale/
|
||||
.devlocaltmp/
|
||||
*_trial_temp
|
||||
packages
|
||||
env/
|
||||
.tox/
|
||||
.buildozer/
|
||||
bin/
|
||||
/app.fil
|
||||
.idea
|
||||
|
||||
# icons
|
||||
electrum/gui/kivy/theming/light-0.png
|
||||
electrum/gui/kivy/theming/light.atlas
|
||||
|
||||
# tests/tox
|
||||
.tox/
|
||||
# tox files
|
||||
.cache/
|
||||
.coverage
|
||||
.pytest_cache
|
||||
|
||||
6
.gitmodules
vendored
@ -1,6 +0,0 @@
|
||||
[submodule "contrib/deterministic-build/electrum-locale"]
|
||||
path = contrib/deterministic-build/electrum-locale
|
||||
url = https://github.com/spesmilo/electrum-locale
|
||||
[submodule "contrib/CalinsQRReader"]
|
||||
path = contrib/osx/CalinsQRReader
|
||||
url = https://github.com/spesmilo/CalinsQRReader
|
||||
92
.travis.yml
@ -1,94 +1,10 @@
|
||||
sudo: true
|
||||
dist: xenial
|
||||
sudo: false
|
||||
language: python
|
||||
python:
|
||||
- 3.6
|
||||
- 3.7
|
||||
git:
|
||||
depth: false
|
||||
addons:
|
||||
apt:
|
||||
sources:
|
||||
- sourceline: 'ppa:tah83/secp256k1'
|
||||
packages:
|
||||
- libsecp256k1-0
|
||||
before_install:
|
||||
- git tag
|
||||
- "2.7"
|
||||
install:
|
||||
- pip install -r contrib/requirements/requirements-travis.txt
|
||||
cache:
|
||||
- pip: true
|
||||
- directories:
|
||||
- /tmp/electrum-build
|
||||
- pip install tox
|
||||
script:
|
||||
- tox
|
||||
after_success:
|
||||
- if [ "$TRAVIS_BRANCH" = "master" ]; then pip install requests && contrib/make_locale; fi
|
||||
- coveralls
|
||||
jobs:
|
||||
include:
|
||||
- stage: binary builds
|
||||
name: "Windows build"
|
||||
sudo: true
|
||||
language: c
|
||||
python: false
|
||||
env:
|
||||
- TARGET_OS=Windows
|
||||
services:
|
||||
- docker
|
||||
install:
|
||||
- sudo docker build --no-cache -t electrum-wine-builder-img ./contrib/build-wine/docker/
|
||||
script:
|
||||
- sudo docker run --name electrum-wine-builder-cont -v $PWD:/opt/wine64/drive_c/electrum --rm --workdir /opt/wine64/drive_c/electrum/contrib/build-wine electrum-wine-builder-img ./build.sh
|
||||
after_success: true
|
||||
- name: "Android build"
|
||||
language: python
|
||||
python: 3.7
|
||||
env:
|
||||
# reset API key to not have make_locale upload stuff here
|
||||
- crowdin_api_key=
|
||||
services:
|
||||
- docker
|
||||
install:
|
||||
- pip install requests && ./contrib/make_locale
|
||||
- ./contrib/make_packages
|
||||
- sudo docker build --no-cache -t electrum-android-builder-img electrum/gui/kivy/tools
|
||||
script:
|
||||
- sudo chown -R 1000:1000 .
|
||||
# Output something every minute or Travis kills the job
|
||||
- while sleep 60; do echo "=====[ $SECONDS seconds still running ]====="; done &
|
||||
- sudo docker run -it -u 1000:1000 --rm --name electrum-android-builder-cont -v $PWD:/home/user/wspace/electrum --workdir /home/user/wspace/electrum electrum-android-builder-img ./contrib/make_apk
|
||||
# kill background sleep loop
|
||||
- kill %1
|
||||
- ls -la bin
|
||||
- if [ $(ls bin | grep -c Electrum-*) -eq 0 ]; then exit 1; fi
|
||||
after_success: true
|
||||
- name: "MacOS build"
|
||||
os: osx
|
||||
language: c
|
||||
env:
|
||||
- TARGET_OS=macOS
|
||||
python: false
|
||||
install:
|
||||
- git fetch --all --tags
|
||||
script: ./contrib/osx/make_osx
|
||||
after_script: ls -lah dist && md5 dist/*
|
||||
after_success: true
|
||||
- name: "AppImage build"
|
||||
sudo: true
|
||||
language: c
|
||||
python: false
|
||||
services:
|
||||
- docker
|
||||
install:
|
||||
- sudo docker build --no-cache -t electrum-appimage-builder-img ./contrib/build-linux/appimage/
|
||||
script:
|
||||
- sudo docker run --name electrum-appimage-builder-cont -v $PWD:/opt/electrum --rm --workdir /opt/electrum/contrib/build-linux/appimage electrum-appimage-builder-img ./build.sh
|
||||
after_success: true
|
||||
- stage: release check
|
||||
install:
|
||||
- git fetch --all --tags
|
||||
script:
|
||||
- ./contrib/deterministic-build/check_submodules.sh
|
||||
after_success: true
|
||||
if: tag IS present
|
||||
- if [ "$TRAVIS_BRANCH" = "master" ]; then pip install pycurl requests && contrib/make_locale; fi
|
||||
|
||||
13
AUTHORS
@ -1,5 +1,3 @@
|
||||
Electrum-BTC
|
||||
------------
|
||||
ThomasV - Creator and maintainer.
|
||||
Animazing / Tachikoma - Styled the new GUI. Mac version.
|
||||
Azelphur - GUI stuff.
|
||||
@ -11,13 +9,4 @@ Genjix - Porting pro-mode functionality to lite-gui and worked on server
|
||||
Slush - Work on the server. Designed the original Stratum spec.
|
||||
Julian Toash (Tuxavant) - Various fixes to the client.
|
||||
rdymac - Website and translations.
|
||||
kyuupichan - Miscellaneous.
|
||||
|
||||
|
||||
FLO-Electrum
|
||||
------------
|
||||
vivekteega - Maintainer and remaining stuff
|
||||
Bitspill - Bootstraped the project with core FLO changes
|
||||
Rohit Tripathy - Ideation and problem solving
|
||||
akhil2015 - Flodata and scrypt hashing
|
||||
|
||||
kyuupichan - Miscellaneous.
|
||||
22
Info.plist
Normal file
@ -0,0 +1,22 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>CFBundleURLTypes</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>CFBundleURLName</key>
|
||||
<string>bitcoin</string>
|
||||
<key>CFBundleURLSchemes</key>
|
||||
<array>
|
||||
<string>bitcoin</string>
|
||||
</array>
|
||||
</dict>
|
||||
</array>
|
||||
<key>LSArchitecturePriority</key>
|
||||
<array>
|
||||
<string>x86_64</string>
|
||||
<string>i386</string>
|
||||
</array>
|
||||
</dict>
|
||||
</plist>
|
||||
19
MANIFEST.in
@ -1,17 +1,16 @@
|
||||
include LICENCE RELEASE-NOTES AUTHORS
|
||||
include README.rst
|
||||
include electrum.conf.sample
|
||||
include electrum.desktop
|
||||
include *.py
|
||||
include run_electrum
|
||||
include contrib/requirements/requirements.txt
|
||||
include contrib/requirements/requirements-hw.txt
|
||||
include electrum
|
||||
recursive-include lib *.py
|
||||
recursive-include gui *.py
|
||||
recursive-include plugins *.py
|
||||
recursive-include packages *.py
|
||||
recursive-include packages cacert.pem
|
||||
include app.fil
|
||||
include icons.qrc
|
||||
recursive-include icons *
|
||||
recursive-include scripts *
|
||||
|
||||
graft electrum
|
||||
prune electrum/tests
|
||||
|
||||
global-exclude __pycache__
|
||||
global-exclude *.py[co~]
|
||||
global-exclude *.py.orig
|
||||
global-exclude *.py.rej
|
||||
|
||||
57
README.rst
@ -5,19 +5,13 @@ Electrum - Lightweight Bitcoin client
|
||||
|
||||
Licence: MIT Licence
|
||||
Author: Thomas Voegtlin
|
||||
Language: Python (>= 3.6)
|
||||
Language: Python
|
||||
Homepage: https://electrum.org/
|
||||
|
||||
|
||||
.. image:: https://travis-ci.org/spesmilo/electrum.svg?branch=master
|
||||
:target: https://travis-ci.org/spesmilo/electrum
|
||||
:alt: Build Status
|
||||
.. image:: https://coveralls.io/repos/github/spesmilo/electrum/badge.svg?branch=master
|
||||
:target: https://coveralls.io/github/spesmilo/electrum?branch=master
|
||||
:alt: Test coverage statistics
|
||||
.. image:: https://d322cqt584bo4o.cloudfront.net/electrum/localized.svg
|
||||
:target: https://crowdin.com/project/electrum
|
||||
:alt: Help translate Electrum online
|
||||
|
||||
|
||||
|
||||
@ -29,24 +23,21 @@ Getting started
|
||||
Electrum is a pure python application. If you want to use the
|
||||
Qt interface, install the Qt dependencies::
|
||||
|
||||
sudo apt-get install python3-pyqt5
|
||||
sudo apt-get install python-qt4
|
||||
|
||||
If you downloaded the official package (tar.gz), you can run
|
||||
Electrum from its root directory without installing it on your
|
||||
Electrum from its root directory, without installing it on your
|
||||
system; all the python dependencies are included in the 'packages'
|
||||
directory. To run Electrum from its root directory, just do::
|
||||
|
||||
./run_electrum
|
||||
./electrum
|
||||
|
||||
You can also install Electrum on your system, by running this command::
|
||||
|
||||
sudo apt-get install python3-setuptools
|
||||
python3 -m pip install .[fast]
|
||||
python setup.py install
|
||||
|
||||
This will download and install the Python dependencies used by
|
||||
Electrum instead of using the 'packages' directory.
|
||||
The 'fast' extra contains some optional dependencies that we think
|
||||
are often useful but they are not strictly needed.
|
||||
Electrum, instead of using the 'packages' directory.
|
||||
|
||||
If you cloned the git repository, you need to compile extra files
|
||||
before you can run Electrum. Read the next section, "Development
|
||||
@ -57,24 +48,28 @@ Version".
|
||||
Development version
|
||||
===================
|
||||
|
||||
Check out the code from GitHub::
|
||||
Check out the code from Github::
|
||||
|
||||
git clone git://github.com/spesmilo/electrum.git
|
||||
cd electrum
|
||||
|
||||
Run install (this should install dependencies)::
|
||||
|
||||
python3 -m pip install .[fast]
|
||||
python setup.py install
|
||||
|
||||
Compile the icons file for Qt::
|
||||
|
||||
sudo apt-get install pyqt4-dev-tools
|
||||
pyrcc4 icons.qrc -o gui/qt/icons_rc.py
|
||||
|
||||
Compile the protobuf description file::
|
||||
|
||||
sudo apt-get install protobuf-compiler
|
||||
protoc --proto_path=electrum --python_out=electrum electrum/paymentrequest.proto
|
||||
protoc --proto_path=lib/ --python_out=lib/ lib/paymentrequest.proto
|
||||
|
||||
Create translations (optional)::
|
||||
|
||||
sudo apt-get install python-requests gettext
|
||||
sudo apt-get install python-pycurl gettext
|
||||
./contrib/make_locale
|
||||
|
||||
|
||||
@ -83,25 +78,33 @@ Create translations (optional)::
|
||||
Creating Binaries
|
||||
=================
|
||||
|
||||
Linux
|
||||
-----
|
||||
|
||||
See :code:`contrib/build-linux/README.md`.
|
||||
To create binaries, create the 'packages' directory::
|
||||
|
||||
./contrib/make_packages
|
||||
|
||||
Mac OS X / macOS
|
||||
----------------
|
||||
This directory contains the python dependencies used by Electrum.
|
||||
|
||||
See :code:`contrib/osx/README.md`.
|
||||
Mac OS X
|
||||
--------
|
||||
|
||||
::
|
||||
|
||||
# On MacPorts installs:
|
||||
sudo python setup-release.py py2app
|
||||
|
||||
# On Homebrew installs:
|
||||
ARCHFLAGS="-arch i386 -arch x86_64" sudo python setup-release.py py2app --includes sip
|
||||
|
||||
sudo hdiutil create -fs HFS+ -volname "Electrum" -srcfolder dist/Electrum.app dist/electrum-VERSION-macosx.dmg
|
||||
|
||||
Windows
|
||||
-------
|
||||
|
||||
See :code:`contrib/build-wine/docker/README.md`.
|
||||
See `contrib/build-wine/README` file.
|
||||
|
||||
|
||||
Android
|
||||
-------
|
||||
|
||||
See :code:`electrum/gui/kivy/Readme.md`.
|
||||
See `gui/kivy/Readme.txt` file.
|
||||
|
||||
443
RELEASE-NOTES
@ -1,432 +1,9 @@
|
||||
# Release 3.3.4 - (February 13, 2019)
|
||||
|
||||
* AppImage: we now also distribute self-contained binaries for x86_64
|
||||
Linux in the form of an AppImage (#5042). The Python interpreter,
|
||||
PyQt5, libsecp256k1, PyCryptodomex, zbar, hidapi/libusb (including
|
||||
hardware wallet libraries) are all bundled. Note that users of
|
||||
hw wallets still need to set udev rules themselves.
|
||||
* hw wallets: fix a regression during transaction signing that prompts
|
||||
the user too many times for confirmations (commit 2729909)
|
||||
* transactions now set nVersion to 2, to mimic Bitcoin Core
|
||||
* fix Qt bug that made all hw wallets unusable on Windows 8.1 (#4960)
|
||||
* fix bugs in wallet creation wizard that resulted in corrupted
|
||||
wallets being created in rare cases (#5082, #5057)
|
||||
* fix compatibility with Qt 5.12 (#5109)
|
||||
|
||||
|
||||
# Release 3.3.3 - (January 25, 2019)
|
||||
|
||||
* Do not expose users to server error messages (#4968)
|
||||
* Notify users of new releases. Release announcements must be signed,
|
||||
and they are verified byElectrum using a hardcoded Bitcoin address.
|
||||
* Hardware wallet fixes (#4991, #4993, #5006)
|
||||
* Display only QR code in QRcode Window
|
||||
* Fixed code signing on MacOS
|
||||
* Randomise locktime of transactions
|
||||
|
||||
|
||||
# Release 3.3.2 - (December 21, 2018)
|
||||
|
||||
* Fix Qt history export bug
|
||||
* Improve network timeouts
|
||||
* Prepend server transaction_broadcast error messages with
|
||||
explanatory message. Render error messages as plain text.
|
||||
|
||||
|
||||
# Release 3.3.1 - (December 20, 2018)
|
||||
|
||||
* Qt: Fix invoices tab crash (#4941)
|
||||
* Android: Minor GUI improvements
|
||||
|
||||
|
||||
# Release 3.3.0 - Hodler's Edition (December 19, 2018)
|
||||
|
||||
* The network layer has been rewritten using asyncio and aiorpcx.
|
||||
In addition to easier maintenance, this makes the client
|
||||
more robust against misbehaving servers.
|
||||
* The minimum python version was increased to 3.6
|
||||
* The blockchain headers and fork handling logic has been generalized.
|
||||
Clients by default now follow chain based on most work, not length.
|
||||
* New wallet creation defaults to native segwit (bech32).
|
||||
* Segwit 2FA: TrustedCoin now supports native segwit p2wsh
|
||||
two-factor wallets.
|
||||
* RBF batching (opt-in): If the wallet has an unconfirmed RBF
|
||||
transaction, new payments will be added to that transaction,
|
||||
instead of creating new transactions.
|
||||
* MacOS: support QR code scanner in binaries.
|
||||
* Android APK:
|
||||
- build using Google NDK instead of Crystax NDK
|
||||
- target API 28
|
||||
- do not use external storage (previously for block headers)
|
||||
* hardware wallets:
|
||||
- Coldcard now supports spending from p2wpkh-p2sh,
|
||||
fixed p2pkh signing for fw 1.1.0
|
||||
- Archos Safe-T mini: fix #4726 signing issue
|
||||
- KeepKey: full segwit support
|
||||
- Trezor: refactoring and compat with python-trezor 0.11
|
||||
- Digital BitBox: support firmware v5.0.0
|
||||
* fix bitcoin URI handling when app already running (#4796)
|
||||
* Qt listings rewritten:
|
||||
the History tab now uses QAbstractItemModel, the other tabs use
|
||||
QStandardItemModel. Performance should be better for large wallets.
|
||||
* Several other minor bugfixes and usability improvements.
|
||||
|
||||
|
||||
# Release 3.2.3 - (September 3, 2018)
|
||||
|
||||
* hardware wallet: the Safe-T mini from Archos is now supported.
|
||||
* hardware wallet: the Coldcard from Coinkite is now supported.
|
||||
* BIP39 seeds: if a seed extension (aka passphrase) contained
|
||||
multiple consecutive whitespaces or leading/trailing whitespaces
|
||||
then the derived addresses were not following spec. This has been
|
||||
fixed, and affected should move their coins. The wizard will show a
|
||||
warning in this case. (#4566)
|
||||
* Revealer: the PRNG used has been changed (#4649)
|
||||
* fix Linux distributables: 'typing' was not bundled, needed for python 3.4
|
||||
* fix #4626: fix spending from segwit multisig wallets involving a Trezor
|
||||
cosigner when using a custom derivation path
|
||||
* fix #4491: on Android, if user had set "uBTC" as base unit, app crashed
|
||||
* fix #4497: on Android, paying bip70 invoices from cold start did not work
|
||||
* Several other minor bugfixes and usability improvements.
|
||||
|
||||
|
||||
# Release 3.2.2 - (July 2nd, 2018)
|
||||
|
||||
* Fix DNS resolution on Windows
|
||||
* Fix websocket bug in daemon
|
||||
|
||||
|
||||
# Release 3.2.1 - (July 1st, 2018)
|
||||
|
||||
* fix Windows binaries: due to build process changes, the locale files
|
||||
were not included; the language could not be changed from English
|
||||
* fix Linux distributables: wordlists were not included (#4475)
|
||||
|
||||
|
||||
# Release 3.2.0 - Satoshi's Vision (June 30, 2018)
|
||||
|
||||
* If present, libsecp256k1 is used to speed up elliptic curve
|
||||
operations. The library is bundled in the Windows, MacOS, and
|
||||
Android binaries. On Linux, it needs to be installed separately.
|
||||
* Two-factor authentication is available on Android. Note that this
|
||||
will only provide additional security if one time passwords are
|
||||
generated on a separate device.
|
||||
* Semi-automated crash reporting is implemented for Android.
|
||||
* Transactions that are dropped from the mempool are kept in the
|
||||
wallet as 'local', and can be rebroadcast. Previously these
|
||||
transactions were deleted from the wallet.
|
||||
* The scriptSig and witness part of transaction inputs are no longer
|
||||
parsed, unless actually needed. The wallet will no longer display
|
||||
'from' addresses corresponding to transaction inputs, except for
|
||||
its own inputs.
|
||||
* The partial transaction format has been incompatibly changed. This
|
||||
was needed as for partial transactions the scriptSig/witness has to
|
||||
be parsed, but for signed transactions we did not want to do the
|
||||
parsing. Users should make sure that all instances of Electrum
|
||||
they use to co-sign or offline sign, are updated together.
|
||||
* Signing of partial transactions created with online imported
|
||||
addresses wallets now supports significantly more
|
||||
setups. Previously only online p2pkh address + offline WIF was
|
||||
supported. Now the following setups are all supported:
|
||||
- online {p2pkh, p2wpkh-p2sh, p2wpkh} address + offline WIF,
|
||||
- online {p2pkh, p2wpkh-p2sh, p2wpkh} address + offline seed/xprv,
|
||||
- online {p2sh, p2wsh-p2sh, p2wsh}-multisig address + offline seeds/xprvs
|
||||
(potentially distributed among several different machines)
|
||||
Note that for the online address + offline HD secret case, you need
|
||||
the offline wallet to recognize the address (i.e. within gap
|
||||
limit). Having an xpub on the online machine is still the
|
||||
recommended setup, as this allows the online machine to generate
|
||||
new addresses on demand.
|
||||
* Segwit multisig for bip39 and hardware wallets is now enabled.
|
||||
(both p2wsh-p2sh and native p2wsh)
|
||||
* Ledger: offline signing for segwit inputs (#3302) This has already
|
||||
worked for Trezor and Digital Bitbox. Offline segwit signing can be
|
||||
combined with online imported addresses wallets.
|
||||
* Added Revealer plugin. ( https://revealer.cc ) Revealer is a seed
|
||||
phrase back-up solution. It allows you to create a cold, analog,
|
||||
multi-factor backup of your wallet seeds, or of any arbitrary
|
||||
secret. The Revealer utilizes a transparent plastic visual one time
|
||||
pad.
|
||||
* Fractional fee rates: the Qt GUI now displays fee rates with 0.1
|
||||
sat/byte precision, and also allows this same resolution in the
|
||||
Send tab.
|
||||
* Hardware wallets: a "show address" button is now displayed in the
|
||||
Receive tab of the Qt GUI. (#4316)
|
||||
* Trezor One: implemented advanced/matrix recovery (#4329)
|
||||
* Qt/Kivy: added "sat" as optional base unit.
|
||||
* Kivy GUI: significant performance improvements when displaying
|
||||
history and address list of large wallets; and transaction dialog
|
||||
of large transactions.
|
||||
* Windows: use dnspython to resolve dns instead of socket.getaddrinfo
|
||||
(#4422)
|
||||
* Importing minikeys: use uncompressed pubkey instead of compressed
|
||||
(#4384)
|
||||
* SPV proofs: check inner nodes not to be valid transactions (#4436)
|
||||
* Qt GUI: there is now an optional "dark" theme (#4461)
|
||||
* Several other minor bugfixes and usability improvements.
|
||||
|
||||
|
||||
# Release 3.1.3 - (April 16, 2018)
|
||||
|
||||
* Qt GUI: seed word auto-complete during restore
|
||||
* Android: fix some crashes
|
||||
* performance improvements (wallet, and Qt GUI)
|
||||
* hardware wallets: show debug message during device scan
|
||||
* Digital Bitbox: enabled BIP84 (p2wpkh) wallet creation
|
||||
* add regtest support (via --regtest flag)
|
||||
* other minor bugfixes and usability improvements
|
||||
|
||||
# Release 3.1.2 - (March 28, 2018)
|
||||
|
||||
* Kivy/android: request PIN on startup
|
||||
* Improve OSX build process
|
||||
* Fix various bugs with hardware wallets
|
||||
* Other minor bugfixes
|
||||
|
||||
# Release 3.1.1 - (March 12, 2018)
|
||||
|
||||
* fix #4031: Trezor T support
|
||||
* partial fix #4060: proxy and hardware wallet can't be used together
|
||||
* fix #4039: can't set address labels
|
||||
* fix crash related to coinbase transactions
|
||||
* MacOS: use internal graphics card
|
||||
* fix openalias related crashes
|
||||
* speed-up capital gains calculations
|
||||
* hw wallet encryption: re-prompt for passphrase if incorrect
|
||||
* other minor fixes.
|
||||
|
||||
|
||||
|
||||
# Release 3.1.0 - (March 5, 2018)
|
||||
|
||||
* Memory-pool based fee estimation. Dynamic fees can target a desired
|
||||
depth in the memory pool. This feature is optional, and ETA-based
|
||||
estimates from Bitcoin Core are still available. Note that miners
|
||||
could exploit this feature, if they conspired and filled the memory
|
||||
pool with expensive transactions that never get mined. However,
|
||||
since the Electrum client already trusts an Electrum server with
|
||||
fee estimates, activating this feature does not introduce any new
|
||||
vulnerability. In addition, the client uses a hard threshold to
|
||||
protect itself from servers sending excessive fee estimates. In
|
||||
practice, ETA-based estimates have resulted in sticky fees, and
|
||||
caused many users to overpay for transactions. Advanced users tend
|
||||
to visit (and trust) websites that display memory-pool data in
|
||||
order to set their fees.
|
||||
* Capital gains: For each outgoing transaction, the difference
|
||||
between the acquisition and liquidation prices of outgoing coins is
|
||||
displayed in the wallet history. By default, historical exchange
|
||||
rates are used to compute acquisition and liquidation prices. These
|
||||
values can also be entered manually, in order to match the actual
|
||||
price realized by the user. The order of liquidation of coins is
|
||||
the natural order defined by the blockchain; this results in
|
||||
capital gain values that are invariant to changes in the set of
|
||||
addresses that are in the wallet. Any other ordering strategy (such
|
||||
as FIFO, LIFO) would result in capital gain values that depend on
|
||||
the presence of other addresses in the wallet.
|
||||
* Local transactions: Transactions can be saved in the wallet without
|
||||
being broadcast. The inputs of local transactions are considered as
|
||||
spent, and their change outputs can be re-used in subsequent
|
||||
transactions. This can be combined with cold storage, in order to
|
||||
create several transactions before broadcasting them. Outgoing
|
||||
transactions that have been removed from the memory pool are also
|
||||
saved in the wallet, and can be broadcast again.
|
||||
* Checkpoints: The initial download of a headers file was replaced
|
||||
with hardcoded checkpoints. The wallet uses one checkpoint per
|
||||
retargeting period. The headers for a retargeting period are
|
||||
downloaded only if transactions need to be verified in this period.
|
||||
* The 'privacy' and 'priority' coin selection policies have been
|
||||
merged into one. Previously, the 'privacy' policy has been unusable
|
||||
because it was was not prioritizing confirmed coins. The new policy
|
||||
is similar to 'privacy', except that it de-prioritizes addresses
|
||||
that have unconfirmed coins.
|
||||
* The 'Send' tab of the Qt GUI displays how transaction fees are
|
||||
computed from transaction size.
|
||||
* The wallet history can be filtered by time interval.
|
||||
* Replace-by-fee is enabled by default. Note that this might cause
|
||||
some issues with wallets that do not display RBF transactions until
|
||||
they are confirmed.
|
||||
* Watching-only wallets and hardware wallets can be encrypted.
|
||||
* Semi-automated crash reporting
|
||||
* The SSL checkbox option was removed from the GUI.
|
||||
* The Trezor T hardware wallet is now supported.
|
||||
* BIP84: native segwit p2wpkh scripts for bip39 seeds and hardware
|
||||
wallets can now be created when specifying a BIP84 derivation
|
||||
path. This is usable with Trezor and Ledger.
|
||||
* Windows: the binaries now include ZBar, and QR code scanning should work.
|
||||
* The Wallet Import Format (WIF) for private keys that was extended in 3.0
|
||||
is changed. Keys in the previous format can be imported, compatibility
|
||||
is maintained. Newly exported keys will be serialized as
|
||||
"script_type:original_wif_format_key".
|
||||
* BIP32 master keys for testnet once again have different version bytes than
|
||||
on mainnet. For the mainnet prefixes {x,y,Y,z,Z}|{pub,prv}, the
|
||||
corresponding testnet prefixes are {t,u,U,v,V}|{pub,prv}.
|
||||
More details and exact version bytes are specified at:
|
||||
https://github.com/spesmilo/electrum-docs/blob/master/xpub_version_bytes.rst
|
||||
Note that due to this change, testnet wallet files created with previous
|
||||
versions of Electrum must be considered broken, and they need to be
|
||||
recreated from seed words.
|
||||
* A new version of the Electrum protocol is required by the client
|
||||
(version 1.2). Servers using older versions of the protocol will
|
||||
not be displayed in the GUI.
|
||||
|
||||
|
||||
# Release 3.0.6 :
|
||||
* Fix transaction parsing bug #3788
|
||||
|
||||
# Release 3.0.5 : (Security update)
|
||||
|
||||
This is a follow-up to the 3.0.4 release, which did not completely fix
|
||||
issue #3374. Users should upgrade to 3.0.5.
|
||||
|
||||
* The JSONRPC interface is password protected
|
||||
* JSONRPC commands are disabled if the GUI is running, except 'ping',
|
||||
which is used to determine if a GUI is already running
|
||||
|
||||
|
||||
# Release 3.0.4 : (Security update)
|
||||
|
||||
* Fix a vulnerability caused by Cross-Origin Resource Sharing (CORS)
|
||||
in the JSONRPC interface. Previous versions of Electrum are
|
||||
vulnerable to port scanning and deanonimization attacks from
|
||||
malicious websites. Wallets that are not password-protected are
|
||||
vulnerable to theft.
|
||||
* Bundle QR scanner with Android app
|
||||
* Minor bug fixes
|
||||
|
||||
# Release 3.0.3
|
||||
* Qt GUI: sweeping now uses the Send tab, allowing fees to be set
|
||||
* Windows: if using the installer binary, there is now a separate shortcut
|
||||
for "Electrum Testnet"
|
||||
* Digital Bitbox: added support for p2sh-segwit
|
||||
* OS notifications for incoming transactions
|
||||
* better transaction size estimation:
|
||||
- fees for segwit txns were somewhat underestimated (#3347)
|
||||
- some multisig txns were underestimated
|
||||
- handle uncompressed pubkeys
|
||||
* fix #3321: testnet for Windows binaries
|
||||
* fix #3264: Ledger/dbb signing on some platforms
|
||||
* fix #3407: KeepKey sending to p2sh output
|
||||
* other minor fixes and usability improvements
|
||||
|
||||
# Release 3.0.2
|
||||
* Android: replace requests tab with address tab, with access to
|
||||
private keys
|
||||
* sweeping minikeys: search for both compressed and uncompressed
|
||||
pubkeys
|
||||
* fix wizard crash when attempting to reset Google Authenticator
|
||||
* fix #3248: fix Ledger+segwit signing
|
||||
* fix #3262: fix SSL payment request signing
|
||||
* other minor fixes.
|
||||
|
||||
# Release 3.0.1
|
||||
* minor bug and usability fixes
|
||||
|
||||
# Release 3.0 - Uncanny Valley (November 1st, 2017)
|
||||
|
||||
* The project was migrated to Python3 and Qt5. Python2 is no longer
|
||||
supported. If you cloned the source repository, you will need to
|
||||
run "python3 setup.py install" in order to install the new
|
||||
dependencies.
|
||||
|
||||
* Segwit support:
|
||||
|
||||
- Native segwit scripts are supported using a new type of
|
||||
seed. The version number for segwit seeds is 0x100. The install
|
||||
wizard will not create segwit seeds by default; users must
|
||||
opt-in with the segwit option.
|
||||
|
||||
- Native segwit scripts are represented using bech32 addresses,
|
||||
following BIP173. Please note that BIP173 is still in draft
|
||||
status, and that other wallets/websites may not support
|
||||
it. Thus, you should keep a non-segwit wallet in order to be
|
||||
able to receive bitcoins during the transition period. If BIP173
|
||||
ends up being rejected or substantially modified, your wallet
|
||||
may have to be restored from seed. This will not affect funds
|
||||
sent to bech32 addresses, and it will not affect the capacity of
|
||||
Electrum to spend these funds.
|
||||
|
||||
- Segwit scripts embedded in p2sh are supported with hardware
|
||||
wallets or bip39 seeds. To create a segwit-in-p2sh wallet,
|
||||
trezor/ledger users will need to enter a BIP49 derivation path.
|
||||
|
||||
- The BIP32 master keys of segwit wallets are serialized using new
|
||||
version numbers. The new version numbers encode the script type,
|
||||
and they result in the following prefixes:
|
||||
|
||||
* xpub/xprv : p2pkh or p2sh
|
||||
* ypub/yprv : p2wpkh-in-p2sh
|
||||
* Ypub/Yprv : p2wsh-in-p2sh
|
||||
* zpub/zprv : p2wpkh
|
||||
* Zpub/Zprv : p2wsh
|
||||
|
||||
These values are identical for mainnet and testnet; tpub/tprv
|
||||
prefixes are no longer used in testnet wallets.
|
||||
|
||||
- The Wallet Import Format (WIF) is similarly extended for segwit
|
||||
scripts. After a base58-encoded key is decoded to binary, its
|
||||
first byte encodes the script type:
|
||||
|
||||
* 128 + 0: p2pkh
|
||||
* 128 + 1: p2wpkh
|
||||
* 128 + 2: p2wpkh-in-p2sh
|
||||
* 128 + 5: p2sh
|
||||
* 128 + 6: p2wsh
|
||||
* 128 + 7: p2wsh-in-p2sh
|
||||
|
||||
The distinction between p2sh and p2pkh in private key means that
|
||||
it is not possible to import a p2sh private key and associate it
|
||||
to a p2pkh address.
|
||||
|
||||
* A new version of the Electrum protocol is required by the client
|
||||
(version 1.1). Servers using older versions of the protocol will
|
||||
not be displayed in the GUI.
|
||||
|
||||
* By default, transactions are time-locked to the height of the
|
||||
current block. Other values of locktime may be passed using the
|
||||
command line.
|
||||
|
||||
|
||||
# Release 2.9.3
|
||||
* fix configuration file issue #2719
|
||||
* fix ledger signing of non-RBF transactions
|
||||
* disable 'spend confirmed only' option by default
|
||||
|
||||
# Release 2.9.2
|
||||
* force headers download if headers file is corrupted
|
||||
* add websocket to windows builds
|
||||
|
||||
# Release 2.9.1
|
||||
* fix initial headers download
|
||||
* validate contacts on import
|
||||
* command-line option for locktime
|
||||
|
||||
# Release 2.9 - Independence (July 27th, 2017)
|
||||
* Multiple Chain Validation: Electrum will download and validate
|
||||
block headers sent by servers that may follow different branches
|
||||
of a fork in the Bitcoin blockchain. Instead of a linear sequence,
|
||||
block headers are organized in a tree structure. Branching points
|
||||
are located efficiently using binary search. The purpose of MCV is
|
||||
to detect and handle blockchain forks that are invisible to the
|
||||
classical SPV model.
|
||||
* The desired branch of a blockchain fork can be selected using the
|
||||
network dialog. Branches are identified by the hash and height of
|
||||
the diverging block. Coin splitting is possible using RBF
|
||||
transaction (a tutorial will be added).
|
||||
* Multibit support: If the user enters a BIP39 seed (or uses a
|
||||
hardware wallet), the full derivation path is configurable in the
|
||||
install wizard.
|
||||
* Option to send only confirmed coins
|
||||
* Qt GUI:
|
||||
- Network dialog uses tabs and gets updated by network events.
|
||||
- The gui tabs use icons
|
||||
* Kivy GUI:
|
||||
- separation between network dialog and wallet settings dialog.
|
||||
- option for manual server entry
|
||||
- proxy configuration
|
||||
* Daemon: The wallet password can be passed as parameter to the
|
||||
JSONRPC API.
|
||||
* Various other bugfixes and improvements.
|
||||
|
||||
# Release 2.9 - Independence
|
||||
* Blockchain fork detection and management:
|
||||
- The SPV module will download and verify block headers from
|
||||
multiple branches
|
||||
- Branching points are located using binary search
|
||||
- The desired branch of a fork can be selected using the network dialog
|
||||
|
||||
# Release 2.8.3
|
||||
* Fix crash on reading older wallet formats.
|
||||
@ -525,7 +102,7 @@ issue #3374. Users should upgrade to 3.0.5.
|
||||
|
||||
# Release 2.7.7
|
||||
* Fix utf8 encoding bug with old wallet seeds (issue #1967)
|
||||
* Fix delete request from menu (issue #1968)
|
||||
* Fix delete request from menu (isue #1968)
|
||||
|
||||
# Release 2.7.6
|
||||
* Fixes a critical bug with imported private keys (issue #1966). Keys
|
||||
@ -888,7 +465,7 @@ issue #3374. Users should upgrade to 3.0.5.
|
||||
* New 'Receive' tab in the GUI:
|
||||
- create and manage payment requests, with QR Codes
|
||||
- the former 'Receive' tab was renamed to 'Addresses'
|
||||
- the former Point of Sale plugin is replaced by a resizable
|
||||
- the former Point of Sale plugin is replaced by a resizeable
|
||||
window that pops up if you click on the QR code
|
||||
|
||||
* The 'Send' tab in the Qt GUI supports transactions with multiple
|
||||
@ -911,7 +488,7 @@ issue #3374. Users should upgrade to 3.0.5.
|
||||
|
||||
* The client accepts servers with a CA-signed SSL certificate.
|
||||
|
||||
* ECIES encrypt/decrypt methods, available in the GUI and using
|
||||
* ECIES encrypt/decrypt methods, availabe in the GUI and using
|
||||
the command line:
|
||||
encrypt <pubkey> <message>
|
||||
decrypt <pubkey> <message>
|
||||
@ -984,7 +561,7 @@ bugfixes: connection problems, transactions staying unverified
|
||||
|
||||
# Release 1.8.1
|
||||
|
||||
* Notification option when receiving new transactions
|
||||
* Notification option when receiving new tranactions
|
||||
* Confirm dialogue before sending large amounts
|
||||
* Alternative datafile location for non-windows systems
|
||||
* Fix offline wallet creation
|
||||
|
||||
29
app.fil
Normal file
@ -0,0 +1,29 @@
|
||||
gui/qt/__init__.py
|
||||
gui/qt/main_window.py
|
||||
gui/qt/history_list.py
|
||||
gui/qt/contact_list.py
|
||||
gui/qt/invoice_list.py
|
||||
gui/qt/request_list.py
|
||||
gui/qt/installwizard.py
|
||||
gui/qt/network_dialog.py
|
||||
gui/qt/password_dialog.py
|
||||
gui/qt/util.py
|
||||
gui/qt/seed_dialog.py
|
||||
gui/qt/transaction_dialog.py
|
||||
gui/qt/address_dialog.py
|
||||
gui/qt/qrcodewidget.py
|
||||
gui/qt/qrtextedit.py
|
||||
gui/qt/qrwindow.py
|
||||
gui/kivy/main.kv
|
||||
gui/kivy/main_window.py
|
||||
gui/kivy/uix/dialogs/__init__.py
|
||||
gui/kivy/uix/dialogs/fee_dialog.py
|
||||
gui/kivy/uix/dialogs/installwizard.py
|
||||
gui/kivy/uix/dialogs/settings.py
|
||||
gui/kivy/uix/dialogs/wallets.py
|
||||
gui/kivy/uix/ui_screens/history.kv
|
||||
gui/kivy/uix/ui_screens/receive.kv
|
||||
gui/kivy/uix/ui_screens/send.kv
|
||||
plugins/labels/qt.py
|
||||
plugins/trezor/qt.py
|
||||
plugins/virtualkeyboard/qt.py
|
||||
@ -1,20 +0,0 @@
|
||||
Source tarballs
|
||||
===============
|
||||
|
||||
1. Build locale files
|
||||
|
||||
```
|
||||
contrib/make_locale
|
||||
```
|
||||
|
||||
2. Prepare python dependencies used by Electrum.
|
||||
|
||||
```
|
||||
contrib/make_packages
|
||||
```
|
||||
|
||||
3. Create source tarball.
|
||||
|
||||
```
|
||||
contrib/make_tgz
|
||||
```
|
||||
@ -1,25 +0,0 @@
|
||||
FROM ubuntu:14.04@sha256:cac55e5d97fad634d954d00a5c2a56d80576a08dcc01036011f26b88263f1578
|
||||
|
||||
ENV LC_ALL=C.UTF-8 LANG=C.UTF-8
|
||||
|
||||
RUN apt-get update -q && \
|
||||
apt-get install -qy \
|
||||
git \
|
||||
wget \
|
||||
make \
|
||||
autotools-dev \
|
||||
autoconf \
|
||||
libtool \
|
||||
xz-utils \
|
||||
libssl-dev \
|
||||
zlib1g-dev \
|
||||
libffi6 \
|
||||
libffi-dev \
|
||||
libusb-1.0-0-dev \
|
||||
libudev-dev \
|
||||
gettext \
|
||||
libzbar0 \
|
||||
&& \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
apt-get autoremove -y && \
|
||||
apt-get clean
|
||||
@ -1,41 +0,0 @@
|
||||
AppImage binary for Electrum
|
||||
============================
|
||||
|
||||
This assumes an Ubuntu host, but it should not be too hard to adapt to another
|
||||
similar system. The docker commands should be executed in the project's root
|
||||
folder.
|
||||
|
||||
1. Install Docker
|
||||
|
||||
```
|
||||
$ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
|
||||
$ sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
|
||||
$ sudo apt-get update
|
||||
$ sudo apt-get install -y docker-ce
|
||||
```
|
||||
|
||||
2. Build image
|
||||
|
||||
```
|
||||
$ sudo docker build --no-cache -t electrum-appimage-builder-img contrib/build-linux/appimage
|
||||
```
|
||||
|
||||
3. Build binary
|
||||
|
||||
```
|
||||
$ sudo docker run -it \
|
||||
--name electrum-appimage-builder-cont \
|
||||
-v $PWD:/opt/electrum \
|
||||
--rm \
|
||||
--workdir /opt/electrum/contrib/build-linux/appimage \
|
||||
electrum-appimage-builder-img \
|
||||
./build.sh
|
||||
```
|
||||
|
||||
4. The generated binary is in `./dist`.
|
||||
|
||||
|
||||
## FAQ
|
||||
|
||||
### How can I see what is included in the AppImage?
|
||||
Execute the binary as follows: `./electrum*.AppImage --appimage-extract`
|
||||
@ -1,11 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
APPDIR="$(dirname "$(readlink -e "$0")")"
|
||||
|
||||
export LD_LIBRARY_PATH="${APPDIR}/usr/lib/:${APPDIR}/usr/lib/x86_64-linux-gnu${LD_LIBRARY_PATH+:$LD_LIBRARY_PATH}"
|
||||
export PATH="${APPDIR}/usr/bin:${PATH}"
|
||||
export LDFLAGS="-L${APPDIR}/usr/lib/x86_64-linux-gnu -L${APPDIR}/usr/lib"
|
||||
|
||||
exec "${APPDIR}/usr/bin/python3.6" -s "${APPDIR}/usr/bin/electrum" "$@"
|
||||
@ -1,197 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
PROJECT_ROOT="$(dirname "$(readlink -e "$0")")/../../.."
|
||||
CONTRIB="$PROJECT_ROOT/contrib"
|
||||
DISTDIR="$PROJECT_ROOT/dist"
|
||||
BUILDDIR="$CONTRIB/build-linux/appimage/build/appimage"
|
||||
APPDIR="$BUILDDIR/electrum.AppDir"
|
||||
CACHEDIR="$CONTRIB/build-linux/appimage/.cache/appimage"
|
||||
|
||||
# pinned versions
|
||||
PYTHON_VERSION=3.6.8
|
||||
PKG2APPIMAGE_COMMIT="83483c2971fcaa1cb0c1253acd6c731ef8404381"
|
||||
LIBSECP_VERSION="b408c6a8b287003d1ade5709e6f7bc3c7f1d5be7"
|
||||
|
||||
|
||||
VERSION=`git describe --tags --dirty --always`
|
||||
APPIMAGE="$DISTDIR/electrum-$VERSION-x86_64.AppImage"
|
||||
|
||||
rm -rf "$BUILDDIR"
|
||||
mkdir -p "$APPDIR" "$CACHEDIR" "$DISTDIR"
|
||||
|
||||
|
||||
. "$CONTRIB"/build_tools_util.sh
|
||||
|
||||
|
||||
info "downloading some dependencies."
|
||||
download_if_not_exist "$CACHEDIR/functions.sh" "https://raw.githubusercontent.com/AppImage/pkg2appimage/$PKG2APPIMAGE_COMMIT/functions.sh"
|
||||
verify_hash "$CACHEDIR/functions.sh" "a73a21a6c1d1e15c0a9f47f017ae833873d1dc6aa74a4c840c0b901bf1dcf09c"
|
||||
|
||||
download_if_not_exist "$CACHEDIR/appimagetool" "https://github.com/probonopd/AppImageKit/releases/download/11/appimagetool-x86_64.AppImage"
|
||||
verify_hash "$CACHEDIR/appimagetool" "c13026b9ebaa20a17e7e0a4c818a901f0faba759801d8ceab3bb6007dde00372"
|
||||
|
||||
download_if_not_exist "$CACHEDIR/Python-$PYTHON_VERSION.tar.xz" "https://www.python.org/ftp/python/$PYTHON_VERSION/Python-$PYTHON_VERSION.tar.xz"
|
||||
verify_hash "$CACHEDIR/Python-$PYTHON_VERSION.tar.xz" "35446241e995773b1bed7d196f4b624dadcadc8429f26282e756b2fb8a351193"
|
||||
|
||||
|
||||
|
||||
info "building python."
|
||||
tar xf "$CACHEDIR/Python-$PYTHON_VERSION.tar.xz" -C "$BUILDDIR"
|
||||
(
|
||||
cd "$BUILDDIR/Python-$PYTHON_VERSION"
|
||||
export SOURCE_DATE_EPOCH=1530212462
|
||||
./configure \
|
||||
--cache-file="$CACHEDIR/python.config.cache" \
|
||||
--prefix="$APPDIR/usr" \
|
||||
--enable-ipv6 \
|
||||
--enable-shared \
|
||||
--with-threads \
|
||||
-q
|
||||
make -s
|
||||
make -s install > /dev/null
|
||||
)
|
||||
|
||||
|
||||
info "building libsecp256k1."
|
||||
(
|
||||
git clone https://github.com/bitcoin-core/secp256k1 "$CACHEDIR"/secp256k1 || (cd "$CACHEDIR"/secp256k1 && git pull)
|
||||
cd "$CACHEDIR"/secp256k1
|
||||
git reset --hard "$LIBSECP_VERSION"
|
||||
git clean -f -x -q
|
||||
export SOURCE_DATE_EPOCH=1530212462
|
||||
./autogen.sh
|
||||
echo "LDFLAGS = -no-undefined" >> Makefile.am
|
||||
./configure \
|
||||
--prefix="$APPDIR/usr" \
|
||||
--enable-module-recovery \
|
||||
--enable-experimental \
|
||||
--enable-module-ecdh \
|
||||
--disable-jni \
|
||||
-q
|
||||
make -s
|
||||
make -s install > /dev/null
|
||||
)
|
||||
|
||||
|
||||
appdir_python() {
|
||||
env \
|
||||
PYTHONNOUSERSITE=1 \
|
||||
LD_LIBRARY_PATH="$APPDIR/usr/lib:$APPDIR/usr/lib/x86_64-linux-gnu${LD_LIBRARY_PATH+:$LD_LIBRARY_PATH}" \
|
||||
"$APPDIR/usr/bin/python3.6" "$@"
|
||||
}
|
||||
|
||||
python='appdir_python'
|
||||
|
||||
|
||||
info "installing pip."
|
||||
"$python" -m ensurepip
|
||||
|
||||
|
||||
info "preparing electrum-locale."
|
||||
(
|
||||
cd "$PROJECT_ROOT"
|
||||
git submodule update --init
|
||||
|
||||
pushd "$CONTRIB"/deterministic-build/electrum-locale
|
||||
if ! which msgfmt > /dev/null 2>&1; then
|
||||
echo "Please install gettext"
|
||||
exit 1
|
||||
fi
|
||||
for i in ./locale/*; do
|
||||
dir="$PROJECT_ROOT/electrum/$i/LC_MESSAGES"
|
||||
mkdir -p $dir
|
||||
msgfmt --output-file="$dir/electrum.mo" "$i/electrum.po" || true
|
||||
done
|
||||
popd
|
||||
)
|
||||
|
||||
|
||||
info "installing electrum and its dependencies."
|
||||
mkdir -p "$CACHEDIR/pip_cache"
|
||||
"$python" -m pip install --cache-dir "$CACHEDIR/pip_cache" -r "$CONTRIB/deterministic-build/requirements.txt"
|
||||
"$python" -m pip install --cache-dir "$CACHEDIR/pip_cache" -r "$CONTRIB/deterministic-build/requirements-binaries.txt"
|
||||
"$python" -m pip install --cache-dir "$CACHEDIR/pip_cache" -r "$CONTRIB/deterministic-build/requirements-hw.txt"
|
||||
"$python" -m pip install --cache-dir "$CACHEDIR/pip_cache" "$PROJECT_ROOT"
|
||||
|
||||
|
||||
info "copying zbar"
|
||||
cp "/usr/lib/libzbar.so.0" "$APPDIR/usr/lib/libzbar.so.0"
|
||||
|
||||
|
||||
info "desktop integration."
|
||||
cp "$PROJECT_ROOT/electrum.desktop" "$APPDIR/electrum.desktop"
|
||||
cp "$PROJECT_ROOT/electrum/gui/icons/electrum.png" "$APPDIR/electrum.png"
|
||||
|
||||
|
||||
# add launcher
|
||||
cp "$CONTRIB/build-linux/appimage/apprun.sh" "$APPDIR/AppRun"
|
||||
|
||||
info "finalizing AppDir."
|
||||
(
|
||||
export PKG2AICOMMIT="$PKG2APPIMAGE_COMMIT"
|
||||
. "$CACHEDIR/functions.sh"
|
||||
|
||||
cd "$APPDIR"
|
||||
# copy system dependencies
|
||||
# note: temporarily move PyQt5 out of the way so
|
||||
# we don't try to bundle its system dependencies.
|
||||
mv "$APPDIR/usr/lib/python3.6/site-packages/PyQt5" "$BUILDDIR"
|
||||
copy_deps; copy_deps; copy_deps
|
||||
move_lib
|
||||
mv "$BUILDDIR/PyQt5" "$APPDIR/usr/lib/python3.6/site-packages"
|
||||
|
||||
# apply global appimage blacklist to exclude stuff
|
||||
# move usr/include out of the way to preserve usr/include/python3.6m.
|
||||
mv usr/include usr/include.tmp
|
||||
delete_blacklisted
|
||||
mv usr/include.tmp usr/include
|
||||
)
|
||||
|
||||
|
||||
info "stripping binaries from debug symbols."
|
||||
strip_binaries()
|
||||
{
|
||||
chmod u+w -R "$APPDIR"
|
||||
{
|
||||
printf '%s\0' "$APPDIR/usr/bin/python3.6"
|
||||
find "$APPDIR" -type f -regex '.*\.so\(\.[0-9.]+\)?$' -print0
|
||||
} | xargs -0 --no-run-if-empty --verbose -n1 strip
|
||||
}
|
||||
strip_binaries
|
||||
|
||||
remove_emptydirs()
|
||||
{
|
||||
find "$APPDIR" -type d -empty -print0 | xargs -0 --no-run-if-empty rmdir -vp --ignore-fail-on-non-empty
|
||||
}
|
||||
remove_emptydirs
|
||||
|
||||
|
||||
info "removing some unneeded stuff to decrease binary size."
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/test
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/config-3.6m-x86_64-linux-gnu
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/translations/qtwebengine_locales
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/resources/qtwebengine_*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/qml
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Web*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Designer*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Qml*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Quick*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Location*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Test*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Xml*
|
||||
|
||||
|
||||
info "creating the AppImage."
|
||||
(
|
||||
cd "$BUILDDIR"
|
||||
chmod +x "$CACHEDIR/appimagetool"
|
||||
"$CACHEDIR/appimagetool" --appimage-extract
|
||||
env VERSION="$VERSION" ./squashfs-root/AppRun --no-appstream --verbose "$APPDIR" "$APPIMAGE"
|
||||
)
|
||||
|
||||
|
||||
info "done."
|
||||
ls -la "$DISTDIR"
|
||||
sha256sum "$DISTDIR"/*
|
||||
12
contrib/build-wine/README
Normal file
@ -0,0 +1,12 @@
|
||||
These scripts can be used for cross-compilation of Windows Electrum executables from Linux/Wine.
|
||||
|
||||
Usage:
|
||||
1. Install Wine (tested with wine-1.7.18)
|
||||
2. Run "./prepare-wine.sh", it will download all dependencies. When you'll be asked, always leave default settings and press "Next >".
|
||||
3. Run "./prepare-hw.sh" to build support for hardware wallets (TREZOR)
|
||||
4. Run "./build-electrum-git.sh". Sources will be packed into three separate versions to dist/ directory:
|
||||
* Standalone compressed executable is "dist/electrum.exe"
|
||||
* Uncompressed binaries are in "dist/electrum". They're useful for comparsion with other builds.
|
||||
* NSIS-based installer of Electrum is "electrum-setup.exe"
|
||||
7. Everytime you want to rebuild new version of Electrum just change the path to ZIP file in "build-electrum.sh" and re-run the script.
|
||||
|
||||
@ -1,37 +0,0 @@
|
||||
Windows Binary Builds
|
||||
=====================
|
||||
|
||||
These scripts can be used for cross-compilation of Windows Electrum executables from Linux/Wine.
|
||||
|
||||
For reproducible builds, see the `docker` folder.
|
||||
|
||||
|
||||
Usage:
|
||||
|
||||
|
||||
1. Install the following dependencies:
|
||||
|
||||
- dirmngr
|
||||
- gpg
|
||||
- 7Zip
|
||||
- Wine (>= v2)
|
||||
- (and, for building libsecp256k1)
|
||||
- mingw-w64
|
||||
- autotools-dev
|
||||
- autoconf
|
||||
- libtool
|
||||
|
||||
|
||||
For example:
|
||||
|
||||
```
|
||||
$ sudo apt-get install wine-development dirmngr gnupg2 p7zip-full
|
||||
$ sudo apt-get install mingw-w64 autotools-dev autoconf libtool
|
||||
```
|
||||
|
||||
The binaries are also built by Travis CI, so if you are having problems,
|
||||
[that script](https://github.com/spesmilo/electrum/blob/master/.travis.yml) might help.
|
||||
|
||||
2. Make sure `/opt` is writable by the current user.
|
||||
3. Run `build.sh`.
|
||||
4. The generated binaries are in `./dist`.
|
||||
@ -1,13 +1,19 @@
|
||||
#!/bin/bash
|
||||
|
||||
# You probably need to update only this link
|
||||
ELECTRUM_GIT_URL=git://github.com/spesmilo/electrum.git
|
||||
BRANCH=master
|
||||
NAME_ROOT=electrum
|
||||
|
||||
if [ "$#" -gt 0 ]; then
|
||||
BRANCH="$1"
|
||||
fi
|
||||
|
||||
# These settings probably don't need any change
|
||||
export WINEPREFIX=/opt/wine64
|
||||
export PYTHONDONTWRITEBYTECODE=1
|
||||
export PYTHONHASHSEED=22
|
||||
|
||||
PYHOME=c:/python3
|
||||
PYHOME=c:/python27
|
||||
PYTHON="wine $PYHOME/python.exe -OO -B"
|
||||
|
||||
|
||||
@ -15,63 +21,68 @@ PYTHON="wine $PYHOME/python.exe -OO -B"
|
||||
cd `dirname $0`
|
||||
set -e
|
||||
|
||||
mkdir -p tmp
|
||||
cd tmp
|
||||
|
||||
pushd $WINEPREFIX/drive_c/electrum
|
||||
if [ -d "electrum-git" ]; then
|
||||
# GIT repository found, update it
|
||||
echo "Pull"
|
||||
cd electrum-git
|
||||
git pull
|
||||
git checkout $BRANCH
|
||||
cd ..
|
||||
else
|
||||
# GIT repository not found, clone it
|
||||
echo "Clone"
|
||||
git clone -b $BRANCH $ELECTRUM_GIT_URL electrum-git
|
||||
fi
|
||||
|
||||
# Load electrum-locale for this release
|
||||
git submodule init
|
||||
git submodule update
|
||||
|
||||
VERSION=`git describe --tags --dirty --always`
|
||||
cd electrum-git
|
||||
VERSION=`git describe --tags`
|
||||
echo "Last commit: $VERSION"
|
||||
|
||||
pushd ./contrib/deterministic-build/electrum-locale
|
||||
if ! which msgfmt > /dev/null 2>&1; then
|
||||
echo "Please install gettext"
|
||||
exit 1
|
||||
fi
|
||||
for i in ./locale/*; do
|
||||
dir=$WINEPREFIX/drive_c/electrum/electrum/$i/LC_MESSAGES
|
||||
mkdir -p $dir
|
||||
msgfmt --output-file=$dir/electrum.mo $i/electrum.po || true
|
||||
done
|
||||
popd
|
||||
cd ..
|
||||
|
||||
find -exec touch -d '2000-11-11T11:11:11+00:00' {} +
|
||||
popd
|
||||
rm -rf $WINEPREFIX/drive_c/electrum
|
||||
cp -r electrum-git $WINEPREFIX/drive_c/electrum
|
||||
cp electrum-git/LICENCE .
|
||||
|
||||
cp $WINEPREFIX/drive_c/electrum/LICENCE .
|
||||
# add python packages (built with make_packages)
|
||||
cp -r ../../../packages $WINEPREFIX/drive_c/electrum/
|
||||
|
||||
# Install frozen dependencies
|
||||
$PYTHON -m pip install -r ../../deterministic-build/requirements.txt
|
||||
# add locale dir
|
||||
cp -r ../../../lib/locale $WINEPREFIX/drive_c/electrum/lib/
|
||||
|
||||
|
||||
# Build Qt resources
|
||||
wine $WINEPREFIX/drive_c/Python27/Lib/site-packages/PyQt4/pyrcc4.exe C:/electrum/icons.qrc -o C:/electrum/lib/icons_rc.py
|
||||
wine $WINEPREFIX/drive_c/Python27/Lib/site-packages/PyQt4/pyrcc4.exe C:/electrum/icons.qrc -o C:/electrum/gui/qt/icons_rc.py
|
||||
|
||||
$PYTHON -m pip install -r ../../deterministic-build/requirements-hw.txt
|
||||
|
||||
pushd $WINEPREFIX/drive_c/electrum
|
||||
$PYTHON -m pip install .
|
||||
$PYTHON setup.py install
|
||||
popd
|
||||
|
||||
cd ..
|
||||
|
||||
rm -rf dist/
|
||||
|
||||
# build standalone and portable versions
|
||||
wine "$PYHOME/scripts/pyinstaller.exe" --noconfirm --ascii --clean --name $NAME_ROOT-$VERSION -w deterministic.spec
|
||||
|
||||
# set timestamps in dist, in order to make the installer reproducible
|
||||
pushd dist
|
||||
find -exec touch -d '2000-11-11T11:11:11+00:00' {} +
|
||||
popd
|
||||
|
||||
# build standalone version
|
||||
wine "C:/python27/scripts/pyinstaller.exe" --noconfirm --ascii --name $NAME_ROOT-$VERSION.exe -w deterministic.spec
|
||||
# build NSIS installer
|
||||
# $VERSION could be passed to the electrum.nsi script, but this would require some rewriting in the script itself.
|
||||
# $VERSION could be passed to the electrum.nsi script, but this would require some rewriting in the script iself.
|
||||
wine "$WINEPREFIX/drive_c/Program Files (x86)/NSIS/makensis.exe" /DPRODUCT_VERSION=$VERSION electrum.nsi
|
||||
|
||||
cd dist
|
||||
mv electrum-setup.exe $NAME_ROOT-$VERSION-setup.exe
|
||||
cd ..
|
||||
|
||||
rm build/ -r
|
||||
|
||||
# build portable version
|
||||
cp portable.patch $WINEPREFIX/drive_c/electrum
|
||||
pushd $WINEPREFIX/drive_c/electrum
|
||||
patch < portable.patch
|
||||
popd
|
||||
wine "C:/python27/scripts/pyinstaller.exe" --noconfirm --ascii --name $NAME_ROOT-$VERSION-portable.exe -w deterministic.spec
|
||||
|
||||
echo "Done."
|
||||
sha256sum dist/electrum*exe
|
||||
|
||||
@ -1,41 +0,0 @@
|
||||
#!/bin/bash
|
||||
# heavily based on https://github.com/ofek/coincurve/blob/417e726f553460f88d7edfa5dc67bfda397c4e4a/.travis/build_windows_wheels.sh
|
||||
|
||||
set -e
|
||||
|
||||
build_dll() {
|
||||
#sudo apt-get install -y mingw-w64
|
||||
export SOURCE_DATE_EPOCH=1530212462
|
||||
./autogen.sh
|
||||
echo "LDFLAGS = -no-undefined" >> Makefile.am
|
||||
LDFLAGS="-Wl,--no-insert-timestamp" ./configure \
|
||||
--host=$1 \
|
||||
--enable-module-recovery \
|
||||
--enable-experimental \
|
||||
--enable-module-ecdh \
|
||||
--disable-jni
|
||||
make
|
||||
${1}-strip .libs/libsecp256k1-0.dll
|
||||
}
|
||||
|
||||
|
||||
cd /tmp/electrum-build
|
||||
|
||||
if [ ! -d secp256k1 ]; then
|
||||
git clone https://github.com/bitcoin-core/secp256k1.git
|
||||
cd secp256k1;
|
||||
else
|
||||
cd secp256k1
|
||||
git pull
|
||||
fi
|
||||
|
||||
LIBSECP_VERSION="b408c6a8b287003d1ade5709e6f7bc3c7f1d5be7"
|
||||
git reset --hard "$LIBSECP_VERSION"
|
||||
git clean -f -x -q
|
||||
|
||||
build_dll i686-w64-mingw32 # 64-bit would be: x86_64-w64-mingw32
|
||||
mv .libs/libsecp256k1-0.dll libsecp256k1.dll
|
||||
|
||||
find -exec touch -d '2000-11-11T11:11:11+00:00' {} +
|
||||
|
||||
echo "building libsecp256k1 finished"
|
||||
@ -1,28 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Lucky number
|
||||
export PYTHONHASHSEED=22
|
||||
|
||||
here=$(dirname "$0")
|
||||
test -n "$here" -a -d "$here" || exit
|
||||
|
||||
echo "Clearing $here/build and $here/dist..."
|
||||
rm "$here"/build/* -rf
|
||||
rm "$here"/dist/* -rf
|
||||
|
||||
mkdir -p /tmp/electrum-build
|
||||
mkdir -p /tmp/electrum-build/pip-cache
|
||||
export PIP_CACHE_DIR="/tmp/electrum-build/pip-cache"
|
||||
|
||||
$here/build-secp256k1.sh || exit 1
|
||||
|
||||
$here/prepare-wine.sh || exit 1
|
||||
|
||||
echo "Resetting modification time in C:\Python..."
|
||||
# (Because of some bugs in pyinstaller)
|
||||
pushd /opt/wine64/drive_c/python*
|
||||
find -exec touch -d '2000-11-11T11:11:11+00:00' {} +
|
||||
popd
|
||||
ls -l /opt/wine64/drive_c/python*
|
||||
|
||||
$here/build-electrum-git.sh && \
|
||||
echo "Done."
|
||||
@ -1,162 +1,88 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
from PyInstaller.utils.hooks import collect_data_files, collect_submodules, collect_dynamic_libs
|
||||
|
||||
import sys
|
||||
for i, x in enumerate(sys.argv):
|
||||
if x == '--name':
|
||||
cmdline_name = sys.argv[i+1]
|
||||
break
|
||||
else:
|
||||
raise Exception('no name')
|
||||
raise BaseException('no name')
|
||||
|
||||
PYHOME = 'c:/python3'
|
||||
|
||||
home = 'C:\\electrum\\'
|
||||
|
||||
# see https://github.com/pyinstaller/pyinstaller/issues/2005
|
||||
hiddenimports = []
|
||||
hiddenimports += collect_submodules('trezorlib')
|
||||
hiddenimports += collect_submodules('safetlib')
|
||||
hiddenimports += collect_submodules('btchip')
|
||||
hiddenimports += collect_submodules('keepkeylib')
|
||||
hiddenimports += collect_submodules('websocket')
|
||||
hiddenimports += collect_submodules('ckcc')
|
||||
dlls = ['libiconv-2.dll',
|
||||
'libjpeg-7.dll',
|
||||
'libMagickCore-2.dll',
|
||||
'libMagickWand-2.dll',
|
||||
'libpng12-0.dll',
|
||||
'libtiff-3.dll',
|
||||
'libxml2-2.dll',
|
||||
'libzbar-0.dll',
|
||||
'zlib1.dll']
|
||||
|
||||
# safetlib imports PyQt5.Qt. We use a local updated copy of pinmatrix.py until they
|
||||
# release a new version that includes https://github.com/archos-safe-t/python-safet/commit/b1eab3dba4c04fdfc1fcf17b66662c28c5f2380e
|
||||
hiddenimports.remove('safetlib.qt.pinmatrix')
|
||||
zbar_dlls = [('C:\\Program Files (x86)\\Zbar\\bin\\'+x, x) for x in dlls]
|
||||
|
||||
|
||||
# Add libusb binary
|
||||
binaries = [(PYHOME+"/libusb-1.0.dll", ".")]
|
||||
|
||||
# Workaround for "Retro Look":
|
||||
binaries += [b for b in collect_dynamic_libs('PyQt5') if 'qwindowsvista' in b[0]]
|
||||
|
||||
binaries += [('C:/tmp/libsecp256k1.dll', '.')]
|
||||
|
||||
datas = [
|
||||
(home+'electrum/*.json', 'electrum'),
|
||||
(home+'electrum/wordlist/english.txt', 'electrum/wordlist'),
|
||||
(home+'electrum/locale', 'electrum/locale'),
|
||||
(home+'electrum/plugins', 'electrum/plugins'),
|
||||
('C:\\Program Files (x86)\\ZBar\\bin\\', '.'),
|
||||
(home+'electrum/gui/icons', 'electrum/gui/icons'),
|
||||
]
|
||||
datas += collect_data_files('trezorlib')
|
||||
datas += collect_data_files('safetlib')
|
||||
datas += collect_data_files('btchip')
|
||||
datas += collect_data_files('keepkeylib')
|
||||
datas += collect_data_files('ckcc')
|
||||
|
||||
# We don't put these files in to actually include them in the script but to make the Analysis method scan them for imports
|
||||
a = Analysis([home+'run_electrum',
|
||||
home+'electrum/gui/qt/main_window.py',
|
||||
home+'electrum/gui/text.py',
|
||||
home+'electrum/util.py',
|
||||
home+'electrum/wallet.py',
|
||||
home+'electrum/simple_config.py',
|
||||
home+'electrum/bitcoin.py',
|
||||
home+'electrum/dnssec.py',
|
||||
home+'electrum/commands.py',
|
||||
home+'electrum/plugins/cosigner_pool/qt.py',
|
||||
home+'electrum/plugins/email_requests/qt.py',
|
||||
home+'electrum/plugins/trezor/qt.py',
|
||||
home+'electrum/plugins/safe_t/client.py',
|
||||
home+'electrum/plugins/safe_t/qt.py',
|
||||
home+'electrum/plugins/keepkey/qt.py',
|
||||
home+'electrum/plugins/ledger/qt.py',
|
||||
home+'electrum/plugins/coldcard/qt.py',
|
||||
#home+'packages/requests/utils.py'
|
||||
a = Analysis([home+'electrum',
|
||||
home+'gui/qt/main_window.py',
|
||||
home+'gui/text.py',
|
||||
home+'lib/util.py',
|
||||
home+'lib/wallet.py',
|
||||
home+'lib/simple_config.py',
|
||||
home+'lib/bitcoin.py',
|
||||
home+'lib/dnssec.py',
|
||||
home+'lib/commands.py',
|
||||
home+'lib/daemon.py',
|
||||
home+'lib/plugins.py',
|
||||
home+'lib/qrscanner.py',
|
||||
home+'plugins/cosigner_pool/qt.py',
|
||||
home+'plugins/email_requests/qt.py',
|
||||
home+'plugins/trezor/client.py',
|
||||
home+'plugins/trezor/qt.py',
|
||||
home+'plugins/keepkey/qt.py',
|
||||
home+'plugins/ledger/qt.py',
|
||||
home+'packages/requests/utils.py'
|
||||
],
|
||||
binaries=binaries,
|
||||
datas=datas,
|
||||
#pathex=[home+'lib', home+'gui', home+'plugins'],
|
||||
hiddenimports=hiddenimports,
|
||||
hookspath=[])
|
||||
|
||||
datas = [
|
||||
(home+'lib/currencies.json', 'electrum'),
|
||||
(home+'lib/wordlist/english.txt', 'electrum/wordlist'),
|
||||
(home+'packages/requests/cacert.pem', 'requests/cacert.pem')
|
||||
],
|
||||
binaries= zbar_dlls,
|
||||
hookspath=[],
|
||||
hiddenimports=["lib", "gui", "plugins", "electrum_gui.qt.icons_rc"]
|
||||
)
|
||||
|
||||
# http://stackoverflow.com/questions/19055089/pyinstaller-onefile-warning-pyconfig-h-when-importing-scipy-or-scipy-signal
|
||||
for d in a.datas:
|
||||
if 'pyconfig' in d[0]:
|
||||
if 'pyconfig' in d[0]:
|
||||
a.datas.remove(d)
|
||||
break
|
||||
|
||||
# Strip out parts of Qt that we never use. Reduces binary size by tens of MBs. see #4815
|
||||
qt_bins2remove=('qt5web', 'qt53d', 'qt5game', 'qt5designer', 'qt5quick',
|
||||
'qt5location', 'qt5test', 'qt5xml', r'pyqt5\qt\qml\qtquick')
|
||||
print("Removing Qt binaries:", *qt_bins2remove)
|
||||
for x in a.binaries.copy():
|
||||
for r in qt_bins2remove:
|
||||
if x[0].lower().startswith(r):
|
||||
a.binaries.remove(x)
|
||||
print('----> Removed x =', x)
|
||||
|
||||
qt_data2remove=(r'pyqt5\qt\translations\qtwebengine_locales', )
|
||||
print("Removing Qt datas:", *qt_data2remove)
|
||||
for x in a.datas.copy():
|
||||
for r in qt_data2remove:
|
||||
if x[0].lower().startswith(r):
|
||||
a.datas.remove(x)
|
||||
print('----> Removed x =', x)
|
||||
|
||||
# hotfix for #3171 (pre-Win10 binaries)
|
||||
a.binaries = [x for x in a.binaries if not x[1].lower().startswith(r'c:\windows')]
|
||||
|
||||
pyz = PYZ(a.pure)
|
||||
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.datas,
|
||||
name=os.path.join('build\\pyi.win32\\electrum', cmdline_name),
|
||||
debug=False,
|
||||
strip=None,
|
||||
upx=False,
|
||||
icon=home+'icons/electrum.ico',
|
||||
console=True)
|
||||
# The console True makes an annoying black box pop up, but it does make Electrum output command line commands, with this turned off no output will be given but commands can still be used
|
||||
|
||||
#####
|
||||
# "standalone" exe with all dependencies packed into it
|
||||
coll = COLLECT(exe,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
strip=None,
|
||||
upx=True,
|
||||
debug=False,
|
||||
icon=home+'icons/electrum.ico',
|
||||
console=True,
|
||||
name=os.path.join('dist', 'electrum'))
|
||||
|
||||
exe_standalone = EXE(
|
||||
pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.datas,
|
||||
name=os.path.join('build\\pyi.win32\\electrum', cmdline_name + ".exe"),
|
||||
debug=False,
|
||||
strip=None,
|
||||
upx=False,
|
||||
icon=home+'electrum/gui/icons/electrum.ico',
|
||||
console=False)
|
||||
# console=True makes an annoying black box pop up, but it does make Electrum output command line commands, with this turned off no output will be given but commands can still be used
|
||||
|
||||
exe_portable = EXE(
|
||||
pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.datas + [ ('is_portable', 'README.md', 'DATA' ) ],
|
||||
name=os.path.join('build\\pyi.win32\\electrum', cmdline_name + "-portable.exe"),
|
||||
debug=False,
|
||||
strip=None,
|
||||
upx=False,
|
||||
icon=home+'electrum/gui/icons/electrum.ico',
|
||||
console=False)
|
||||
|
||||
#####
|
||||
# exe and separate files that NSIS uses to build installer "setup" exe
|
||||
|
||||
exe_dependent = EXE(
|
||||
pyz,
|
||||
a.scripts,
|
||||
exclude_binaries=True,
|
||||
name=os.path.join('build\\pyi.win32\\electrum', cmdline_name),
|
||||
debug=False,
|
||||
strip=None,
|
||||
upx=False,
|
||||
icon=home+'electrum/gui/icons/electrum.ico',
|
||||
console=False)
|
||||
|
||||
coll = COLLECT(
|
||||
exe_dependent,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
strip=None,
|
||||
upx=True,
|
||||
debug=False,
|
||||
icon=home+'electrum/gui/icons/electrum.ico',
|
||||
console=False,
|
||||
name=os.path.join('dist', 'electrum'))
|
||||
|
||||
@ -1,41 +0,0 @@
|
||||
FROM ubuntu:18.04@sha256:5f4bdc3467537cbbe563e80db2c3ec95d548a9145d64453b06939c4592d67b6d
|
||||
|
||||
ENV LC_ALL=C.UTF-8 LANG=C.UTF-8
|
||||
|
||||
RUN dpkg --add-architecture i386 && \
|
||||
apt-get update -q && \
|
||||
apt-get install -qy \
|
||||
wget=1.19.4-1ubuntu2.1 \
|
||||
gnupg2=2.2.4-1ubuntu1.2 \
|
||||
dirmngr=2.2.4-1ubuntu1.2 \
|
||||
python3-software-properties=0.96.24.32.1 \
|
||||
software-properties-common=0.96.24.32.1
|
||||
|
||||
RUN apt-get update -q && \
|
||||
apt-get install -qy \
|
||||
git=1:2.17.1-1ubuntu0.4 \
|
||||
p7zip-full=16.02+dfsg-6 \
|
||||
make=4.1-9.1ubuntu1 \
|
||||
mingw-w64=5.0.3-1 \
|
||||
autotools-dev=20180224.1 \
|
||||
autoconf=2.69-11 \
|
||||
libtool=2.4.6-2 \
|
||||
gettext=0.19.8.1-6
|
||||
|
||||
RUN wget -nc https://dl.winehq.org/wine-builds/Release.key && \
|
||||
echo "c51bcb8cc4a12abfbd7c7660eaf90f49674d15e222c262f27e6c96429111b822 Release.key" | sha256sum -c - && \
|
||||
apt-key add Release.key && \
|
||||
wget -nc https://dl.winehq.org/wine-builds/winehq.key && \
|
||||
echo "78b185fabdb323971d13bd329fefc8038e08559aa51c4996de18db0639a51df6 winehq.key" | sha256sum -c - && \
|
||||
apt-key add winehq.key && \
|
||||
apt-add-repository https://dl.winehq.org/wine-builds/ubuntu/ && \
|
||||
apt-get update -q && \
|
||||
apt-get install -qy \
|
||||
wine-stable-amd64:amd64=4.0~bionic \
|
||||
wine-stable-i386:i386=4.0~bionic \
|
||||
wine-stable:amd64=4.0~bionic \
|
||||
winehq-stable:amd64=4.0~bionic
|
||||
|
||||
RUN rm -rf /var/lib/apt/lists/* && \
|
||||
apt-get autoremove -y && \
|
||||
apt-get clean
|
||||
@ -1,103 +0,0 @@
|
||||
Deterministic Windows binaries with Docker
|
||||
==========================================
|
||||
|
||||
Produced binaries are deterministic, so you should be able to generate
|
||||
binaries that match the official releases.
|
||||
|
||||
This assumes an Ubuntu host, but it should not be too hard to adapt to another
|
||||
similar system. The docker commands should be executed in the project's root
|
||||
folder.
|
||||
|
||||
1. Install Docker
|
||||
|
||||
```
|
||||
$ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
|
||||
$ sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
|
||||
$ sudo apt-get update
|
||||
$ sudo apt-get install -y docker-ce
|
||||
```
|
||||
|
||||
2. Build image
|
||||
|
||||
```
|
||||
$ sudo docker build -t electrum-wine-builder-img contrib/build-wine/docker
|
||||
```
|
||||
|
||||
Note: see [this](https://stackoverflow.com/a/40516974/7499128) if having dns problems
|
||||
|
||||
3. Build Windows binaries
|
||||
|
||||
It's recommended to build from a fresh clone
|
||||
(but you can skip this if reproducibility is not necessary).
|
||||
|
||||
```
|
||||
$ FRESH_CLONE=contrib/build-wine/fresh_clone && \
|
||||
rm -rf $FRESH_CLONE && \
|
||||
mkdir -p $FRESH_CLONE && \
|
||||
cd $FRESH_CLONE && \
|
||||
git clone https://github.com/spesmilo/electrum.git && \
|
||||
cd electrum
|
||||
```
|
||||
|
||||
And then build from this directory:
|
||||
```
|
||||
$ git checkout $REV
|
||||
$ sudo docker run -it \
|
||||
--name electrum-wine-builder-cont \
|
||||
-v $PWD:/opt/wine64/drive_c/electrum \
|
||||
--rm \
|
||||
--workdir /opt/wine64/drive_c/electrum/contrib/build-wine \
|
||||
electrum-wine-builder-img \
|
||||
./build.sh
|
||||
```
|
||||
4. The generated binaries are in `./contrib/build-wine/dist`.
|
||||
|
||||
|
||||
|
||||
Note: the `setup` binary (NSIS installer) is not deterministic yet.
|
||||
|
||||
|
||||
Code Signing
|
||||
============
|
||||
|
||||
Electrum Windows builds are signed with a Microsoft Authenticode™ code signing
|
||||
certificate in addition to the GPG-based signatures.
|
||||
|
||||
The advantage of using Authenticode is that Electrum users won't receive a
|
||||
Windows SmartScreen warning when starting it.
|
||||
|
||||
The release signing procedure involves a signer (the holder of the
|
||||
certificate/key) and one or multiple trusted verifiers:
|
||||
|
||||
|
||||
| Signer | Verifier |
|
||||
|-----------------------------------------------------------|-----------------------------------|
|
||||
| Build .exe files using `build.sh` | |
|
||||
| Sign .exe with `./sign.sh` | |
|
||||
| Upload signed files to download server | |
|
||||
| | Build .exe files using `build.sh` |
|
||||
| | Compare files using `unsign.sh` |
|
||||
| | Sign .exe file using `gpg -b` |
|
||||
|
||||
| Signer and verifiers: |
|
||||
|-----------------------------------------------------------------------------------------------|
|
||||
| Upload signatures to 'electrum-signatures' repo, as `$version/$filename.$builder.asc` |
|
||||
|
||||
|
||||
|
||||
Verify Integrity of signed binary
|
||||
=================================
|
||||
|
||||
Every user can verify that the official binary was created from the source code in this
|
||||
repository. To do so, the Authenticode signature needs to be stripped since the signature
|
||||
is not reproducible.
|
||||
|
||||
This procedure removes the differences between the signed and unsigned binary:
|
||||
|
||||
1. Remove the signature from the signed binary using osslsigncode or signtool.
|
||||
2. Set the COFF image checksum for the signed binary to 0x0. This is necessary
|
||||
because pyinstaller doesn't generate a checksum.
|
||||
3. Append null bytes to the _unsigned_ binary until the byte count is a multiple
|
||||
of 8.
|
||||
|
||||
The script `unsign.sh` performs these steps.
|
||||
@ -1,6 +1,6 @@
|
||||
;--------------------------------
|
||||
;Include Modern UI
|
||||
!include "TextFunc.nsh" ;Needed for the $GetSize function. I know, doesn't sound logical, it isn't.
|
||||
!include "TextFunc.nsh" ;Needed for the $GetSize fuction. I know, doesn't sound logical, it isn't.
|
||||
!include "MUI2.nsh"
|
||||
|
||||
;--------------------------------
|
||||
@ -58,7 +58,7 @@
|
||||
VIAddVersionKey ProductName "${PRODUCT_NAME} Installer"
|
||||
VIAddVersionKey Comments "The installer for ${PRODUCT_NAME}"
|
||||
VIAddVersionKey CompanyName "${PRODUCT_NAME}"
|
||||
VIAddVersionKey LegalCopyright "2013-2018 ${PRODUCT_PUBLISHER}"
|
||||
VIAddVersionKey LegalCopyright "2013-2016 ${PRODUCT_PUBLISHER}"
|
||||
VIAddVersionKey FileDescription "${PRODUCT_NAME} Installer"
|
||||
VIAddVersionKey FileVersion ${PRODUCT_VERSION}
|
||||
VIAddVersionKey ProductVersion ${PRODUCT_VERSION}
|
||||
@ -72,7 +72,7 @@
|
||||
!define MUI_ABORTWARNING
|
||||
!define MUI_ABORTWARNING_TEXT "Are you sure you wish to abort the installation of ${PRODUCT_NAME}?"
|
||||
|
||||
!define MUI_ICON "c:\electrum\electrum\gui\icons\electrum.ico"
|
||||
!define MUI_ICON "..\..\icons\electrum.ico"
|
||||
|
||||
;--------------------------------
|
||||
;Pages
|
||||
@ -111,7 +111,7 @@ Section
|
||||
|
||||
;Files to pack into the installer
|
||||
File /r "dist\electrum\*.*"
|
||||
File "c:\electrum\electrum\gui\icons\electrum.ico"
|
||||
File "..\..\icons\electrum.ico"
|
||||
|
||||
;Store installation folder
|
||||
WriteRegStr HKCU "Software\${PRODUCT_NAME}" "" $INSTDIR
|
||||
@ -129,8 +129,6 @@ Section
|
||||
CreateDirectory "$SMPROGRAMS\${PRODUCT_NAME}"
|
||||
CreateShortCut "$SMPROGRAMS\${PRODUCT_NAME}\Uninstall.lnk" "$INSTDIR\Uninstall.exe" "" "$INSTDIR\Uninstall.exe" 0
|
||||
CreateShortCut "$SMPROGRAMS\${PRODUCT_NAME}\${PRODUCT_NAME}.lnk" "$INSTDIR\electrum-${PRODUCT_VERSION}.exe" "" "$INSTDIR\electrum-${PRODUCT_VERSION}.exe" 0
|
||||
CreateShortCut "$SMPROGRAMS\${PRODUCT_NAME}\${PRODUCT_NAME} Testnet.lnk" "$INSTDIR\electrum-${PRODUCT_VERSION}.exe" "--testnet" "$INSTDIR\electrum-${PRODUCT_VERSION}.exe" 0
|
||||
|
||||
|
||||
;Links bitcoin: URI's to Electrum
|
||||
WriteRegStr HKCU "Software\Classes\bitcoin" "" "URL:bitcoin Protocol"
|
||||
@ -138,7 +136,7 @@ Section
|
||||
WriteRegStr HKCU "Software\Classes\bitcoin" "DefaultIcon" "$\"$INSTDIR\electrum.ico, 0$\""
|
||||
WriteRegStr HKCU "Software\Classes\bitcoin\shell\open\command" "" "$\"$INSTDIR\electrum-${PRODUCT_VERSION}.exe$\" $\"%1$\""
|
||||
|
||||
;Adds an uninstaller possibility to Windows Uninstall or change a program section
|
||||
;Adds an uninstaller possibilty to Windows Uninstall or change a program section
|
||||
WriteRegStr HKCU "${PRODUCT_UNINST_KEY}" "DisplayName" "$(^Name)"
|
||||
WriteRegStr HKCU "${PRODUCT_UNINST_KEY}" "UninstallString" "$INSTDIR\Uninstall.exe"
|
||||
WriteRegStr HKCU "${PRODUCT_UNINST_KEY}" "DisplayVersion" "${PRODUCT_VERSION}"
|
||||
@ -170,4 +168,4 @@ Section "Uninstall"
|
||||
DeleteRegKey HKCU "Software\Classes\bitcoin"
|
||||
DeleteRegKey HKCU "Software\${PRODUCT_NAME}"
|
||||
DeleteRegKey HKCU "${PRODUCT_UNINST_KEY}"
|
||||
SectionEnd
|
||||
SectionEnd
|
||||
13
contrib/build-wine/portable.patch
Normal file
@ -0,0 +1,13 @@
|
||||
diff --git a/electrum b/electrum
|
||||
index 8c972c6..46903b7 100755
|
||||
--- a/electrum
|
||||
+++ b/electrum
|
||||
@@ -454,6 +454,8 @@ if __name__ == '__main__':
|
||||
if config_options.get('server'):
|
||||
config_options['auto_connect'] = False
|
||||
|
||||
+ config_options['portable'] = True
|
||||
+
|
||||
if config_options.get('portable'):
|
||||
config_options['electrum_path'] = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'electrum_data')
|
||||
|
||||
91
contrib/build-wine/prepare-hw.sh
Executable file
@ -0,0 +1,91 @@
|
||||
#!/bin/bash
|
||||
|
||||
TREZOR_GIT_URL=git://github.com/trezor/python-trezor.git
|
||||
KEEPKEY_GIT_URL=git://github.com/keepkey/python-keepkey.git
|
||||
BTCHIP_GIT_URL=git://github.com/LedgerHQ/btchip-python.git
|
||||
|
||||
BRANCH=master
|
||||
|
||||
# These settings probably don't need any change
|
||||
export WINEPREFIX=/opt/wine64
|
||||
|
||||
PYHOME=c:/python27
|
||||
PYTHON="wine $PYHOME/python.exe "
|
||||
|
||||
# Let's begin!
|
||||
cd `dirname $0`
|
||||
set -e
|
||||
|
||||
cd tmp
|
||||
|
||||
# downoad mingw-get-setup.exe
|
||||
#wget http://downloads.sourceforge.net/project/mingw/Installer/mingw-get-setup.exe
|
||||
#wine mingw-get-setup.exe
|
||||
|
||||
#echo "add c:\MinGW\bin to PATH using regedit"
|
||||
#regedit
|
||||
#exit
|
||||
|
||||
#wine mingw-get install gcc
|
||||
#wine mingw-get install mingw-utils
|
||||
#wine mingw-get install mingw32-libz
|
||||
|
||||
#create cfg file
|
||||
#printf "[build]\ncompiler=mingw32\n" > /opt/me/wine64/drive_c/Python27/Lib/distutils/distutils.cfg
|
||||
|
||||
# Install Cython
|
||||
#wine "$PYHOME\\Scripts\\easy_install.exe" cython
|
||||
|
||||
|
||||
# not working
|
||||
##wine "$PYHOME\\Scripts\\easy_install.exe" hidapi
|
||||
|
||||
#git clone https://github.com/trezor/cython-hidapi.git
|
||||
|
||||
#replace: from distutils.core import setup, Extenstion
|
||||
|
||||
#cd cython-hidapi
|
||||
#git submodule init
|
||||
#git submodule update
|
||||
#$PYTHON setup.py install
|
||||
#cd ..
|
||||
|
||||
# trezor
|
||||
TREZOR_URL="https://pypi.python.org/packages/26/80/26c9676cbee58e50e7f7dd6a797931203cf198ff7590f55842d620cd60a8/trezor-0.7.12.tar.gz"
|
||||
if ! [ -d "trezor-0.7.12" ]; then
|
||||
wget $TREZOR_URL
|
||||
tar -xvzf trezor-0.7.12.tar.gz
|
||||
fi
|
||||
cd trezor-0.7.12
|
||||
$PYTHON setup.py install
|
||||
cd ..
|
||||
|
||||
#keepkey
|
||||
if [ -d "keepkey-git" ]; then
|
||||
cd keepkey-git
|
||||
git checkout master
|
||||
git pull
|
||||
cd ..
|
||||
else
|
||||
git clone -b $BRANCH $KEEPKEY_GIT_URL keepkey-git
|
||||
fi
|
||||
cd keepkey-git
|
||||
# checkout 2 commits before v0.7.3, because it fails to build
|
||||
# git checkout v0.7.3
|
||||
git checkout 7abe0f0c9026907e9a8db1d231e084df2c175817
|
||||
$PYTHON setup.py install
|
||||
cd ..
|
||||
|
||||
#btchip
|
||||
if [ -d "btchip-git" ]; then
|
||||
cd btchip-git
|
||||
git checkout master
|
||||
git pull
|
||||
cd ..
|
||||
else
|
||||
git clone -b $BRANCH $BTCHIP_GIT_URL btchip-git
|
||||
fi
|
||||
cd btchip-git
|
||||
$PYTHON setup.py install
|
||||
cd ..
|
||||
|
||||
@ -1,84 +1,81 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Please update these carefully, some versions won't work under Wine
|
||||
NSIS_FILENAME=nsis-3.04-setup.exe
|
||||
NSIS_URL=https://prdownloads.sourceforge.net/nsis/$NSIS_FILENAME?download
|
||||
NSIS_SHA256=4e1db5a7400e348b1b46a4a11b6d9557fd84368e4ad3d4bc4c1be636c89638aa
|
||||
# Please update these links carefully, some versions won't work under Wine
|
||||
PYTHON_URL=https://www.python.org/ftp/python/2.7.13/python-2.7.13.msi
|
||||
PYQT4_URL=http://sourceforge.net/projects/pyqt/files/PyQt4/PyQt-4.11.1/PyQt4-4.11.1-gpl-Py2.7-Qt4.8.6-x32.exe
|
||||
PYWIN32_URL=http://sourceforge.net/projects/pywin32/files/pywin32/Build%20219/pywin32-219.win32-py2.7.exe/download
|
||||
NSIS_URL=http://prdownloads.sourceforge.net/nsis/nsis-2.46-setup.exe?download
|
||||
SETUPTOOLS_URL=https://pypi.python.org/packages/2.7/s/setuptools/setuptools-0.6c11.win32-py2.7.exe
|
||||
|
||||
ZBAR_FILENAME=zbarw-20121031-setup.exe
|
||||
ZBAR_URL=https://sourceforge.net/projects/zbarw/files/$ZBAR_FILENAME/download
|
||||
ZBAR_SHA256=177e32b272fa76528a3af486b74e9cb356707be1c5ace4ed3fcee9723e2c2c02
|
||||
|
||||
LIBUSB_FILENAME=libusb-1.0.22.7z
|
||||
LIBUSB_URL=https://prdownloads.sourceforge.net/project/libusb/libusb-1.0/libusb-1.0.22/$LIBUSB_FILENAME?download
|
||||
LIBUSB_SHA256=671f1a420757b4480e7fadc8313d6fb3cbb75ca00934c417c1efa6e77fb8779b
|
||||
|
||||
PYTHON_VERSION=3.6.8
|
||||
|
||||
## These settings probably don't need change
|
||||
export WINEPREFIX=/opt/wine64
|
||||
#export WINEARCH='win32'
|
||||
|
||||
PYTHON_FOLDER="python3"
|
||||
PYHOME="c:/$PYTHON_FOLDER"
|
||||
PYHOME=c:/python27
|
||||
PYTHON="wine $PYHOME/python.exe -OO -B"
|
||||
|
||||
|
||||
# Let's begin!
|
||||
here="$(dirname "$(readlink -e "$0")")"
|
||||
cd `dirname $0`
|
||||
set -e
|
||||
|
||||
. $here/../build_tools_util.sh
|
||||
# Clean up Wine environment
|
||||
echo "Cleaning $WINEPREFIX"
|
||||
rm -rf $WINEPREFIX/
|
||||
mkdir $WINEPREFIX
|
||||
echo "done"
|
||||
|
||||
wine 'wineboot'
|
||||
|
||||
echo "Cleaning tmp"
|
||||
rm -rf tmp
|
||||
mkdir -p tmp
|
||||
echo "done"
|
||||
|
||||
cd /tmp/electrum-build
|
||||
cd tmp
|
||||
|
||||
# Install Python
|
||||
# note: you might need "sudo apt-get install dirmngr" for the following
|
||||
# keys from https://www.python.org/downloads/#pubkeys
|
||||
KEYLIST_PYTHON_DEV="531F072D39700991925FED0C0EDDC5F26A45C816 26DEA9D4613391EF3E25C9FF0A5B101836580288 CBC547978A3964D14B9AB36A6AF053F07D9DC8D2 C01E1CAD5EA2C4F0B8E3571504C367C218ADD4FF 12EF3DC38047DA382D18A5B999CDEA9DA4135B38 8417157EDBE73D9EAC1E539B126EB563A74B06BF DBBF2EEBF925FAADCF1F3FFFD9866941EA5BBD71 2BA0DB82515BBB9EFFAC71C5C9BE28DEE6DF025C 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D C9B104B3DD3AA72D7CCB1066FB9921286F5E1540 97FC712E4C024BBEA48A61ED3A5CA953F73C700D 7ED10B6531D7C8E1BC296021FC624643487034E5"
|
||||
KEYRING_PYTHON_DEV="keyring-electrum-build-python-dev.gpg"
|
||||
for server in $(shuf -e ha.pool.sks-keyservers.net \
|
||||
hkp://p80.pool.sks-keyservers.net:80 \
|
||||
keyserver.ubuntu.com \
|
||||
hkp://keyserver.ubuntu.com:80) ; do
|
||||
retry gpg --no-default-keyring --keyring $KEYRING_PYTHON_DEV --keyserver "$server" --recv-keys $KEYLIST_PYTHON_DEV \
|
||||
&& break || : ;
|
||||
done
|
||||
for msifile in core dev exe lib pip tools; do
|
||||
echo "Installing $msifile..."
|
||||
wget -N -c "https://www.python.org/ftp/python/$PYTHON_VERSION/win32/${msifile}.msi"
|
||||
wget -N -c "https://www.python.org/ftp/python/$PYTHON_VERSION/win32/${msifile}.msi.asc"
|
||||
verify_signature "${msifile}.msi.asc" $KEYRING_PYTHON_DEV
|
||||
wine msiexec /i "${msifile}.msi" /qb TARGETDIR=$PYHOME
|
||||
done
|
||||
wget -O python.msi "$PYTHON_URL"
|
||||
wine msiexec /q /i python.msi
|
||||
|
||||
# Install dependencies specific to binaries
|
||||
# note that this also installs pinned versions of both pip and setuptools
|
||||
$PYTHON -m pip install -r "$here"/../deterministic-build/requirements-binaries.txt
|
||||
# Install PyWin32
|
||||
wget -O pywin32.exe "$PYWIN32_URL"
|
||||
wine pywin32.exe
|
||||
|
||||
# Install PyInstaller
|
||||
$PYTHON -m pip install pyinstaller==3.4 --no-use-pep517
|
||||
# Install PyQt4
|
||||
wget -O PyQt.exe "$PYQT4_URL"
|
||||
wine PyQt.exe
|
||||
|
||||
# Install setuptools
|
||||
wget -O setuptools.exe "$SETUPTOOLS_URL"
|
||||
wine setuptools.exe
|
||||
|
||||
|
||||
# Install pyinstaller
|
||||
$PYTHON -m pip install pyinstaller==3.2.1
|
||||
$PYTHON -m pip install win_inet_pton
|
||||
|
||||
# Install ZBar
|
||||
download_if_not_exist $ZBAR_FILENAME "$ZBAR_URL"
|
||||
verify_hash $ZBAR_FILENAME "$ZBAR_SHA256"
|
||||
wine "$PWD/$ZBAR_FILENAME" /S
|
||||
wget -q -O zbar.exe "http://sourceforge.net/projects/zbar/files/zbar/0.10/zbar-0.10-setup.exe/download"
|
||||
wine zbar.exe
|
||||
|
||||
# install Cryptodome
|
||||
$PYTHON -m pip install pycryptodomex
|
||||
|
||||
# Upgrade setuptools (so Electrum can be installed later)
|
||||
$PYTHON -m pip install setuptools --upgrade
|
||||
|
||||
|
||||
# Install NSIS installer
|
||||
download_if_not_exist $NSIS_FILENAME "$NSIS_URL"
|
||||
verify_hash $NSIS_FILENAME "$NSIS_SHA256"
|
||||
wine "$PWD/$NSIS_FILENAME" /S
|
||||
echo "Make sure to untick 'Start NSIS' and 'Show release notes'"
|
||||
wget -q -O nsis.exe "$NSIS_URL"
|
||||
wine nsis.exe
|
||||
|
||||
download_if_not_exist $LIBUSB_FILENAME "$LIBUSB_URL"
|
||||
verify_hash $LIBUSB_FILENAME "$LIBUSB_SHA256"
|
||||
7z x -olibusb $LIBUSB_FILENAME -aoa
|
||||
# Install UPX
|
||||
#wget -O upx.zip "http://upx.sourceforge.net/download/upx308w.zip"
|
||||
#unzip -o upx.zip
|
||||
#cp upx*/upx.exe .
|
||||
|
||||
cp libusb/MS32/dll/libusb-1.0.dll $WINEPREFIX/drive_c/$PYTHON_FOLDER/
|
||||
|
||||
mkdir -p $WINEPREFIX/drive_c/tmp
|
||||
cp secp256k1/libsecp256k1.dll $WINEPREFIX/drive_c/tmp/
|
||||
|
||||
echo "Wine is configured."
|
||||
# add dlls needed for pyinstaller:
|
||||
cp $WINEPREFIX/drive_c/windows/system32/msvcp90.dll $WINEPREFIX/drive_c/Python27/
|
||||
cp $WINEPREFIX/drive_c/windows/system32/msvcm90.dll $WINEPREFIX/drive_c/Python27/
|
||||
|
||||
@ -1,34 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
here=$(dirname "$0")
|
||||
test -n "$here" -a -d "$here" || exit
|
||||
cd $here
|
||||
|
||||
CERT_FILE=${CERT_FILE:-~/codesigning/cert.pem}
|
||||
KEY_FILE=${KEY_FILE:-~/codesigning/key.pem}
|
||||
if [[ ! -f "$CERT_FILE" ]]; then
|
||||
ls $CERT_FILE
|
||||
echo "Make sure that $CERT_FILE and $KEY_FILE exist"
|
||||
fi
|
||||
|
||||
if ! which osslsigncode > /dev/null 2>&1; then
|
||||
echo "Please install osslsigncode"
|
||||
fi
|
||||
|
||||
rm -rf signed
|
||||
mkdir -p signed >/dev/null 2>&1
|
||||
|
||||
cd dist
|
||||
echo "Found $(ls *.exe | wc -w) files to sign."
|
||||
for f in $(ls *.exe); do
|
||||
echo "Signing $f..."
|
||||
osslsigncode sign \
|
||||
-certs "$CERT_FILE" \
|
||||
-key "$KEY_FILE" \
|
||||
-n "Electrum" \
|
||||
-i "https://electrum.org/" \
|
||||
-t "http://timestamp.digicert.com/" \
|
||||
-in "$f" \
|
||||
-out "../signed/$f"
|
||||
ls ../signed/$f -lah
|
||||
done
|
||||
@ -1,56 +0,0 @@
|
||||
#!/bin/bash
|
||||
here=$(dirname "$0")
|
||||
test -n "$here" -a -d "$here" || exit
|
||||
cd $here
|
||||
|
||||
if ! which osslsigncode > /dev/null 2>&1; then
|
||||
echo "Please install osslsigncode"
|
||||
exit
|
||||
fi
|
||||
|
||||
# exit if command fails
|
||||
set -e
|
||||
|
||||
mkdir -p signed >/dev/null 2>&1
|
||||
mkdir -p signed/stripped >/dev/null 2>&1
|
||||
|
||||
version=`python3 -c "import electrum; print(electrum.version.ELECTRUM_VERSION)"`
|
||||
|
||||
echo "Found $(ls dist/*.exe | wc -w) files to verify."
|
||||
|
||||
for mine in $(ls dist/*.exe); do
|
||||
echo "---------------"
|
||||
f=$(basename $mine)
|
||||
echo "Downloading https://download.electrum.org/$version/$f"
|
||||
wget -q https://download.electrum.org/$version/$f -O signed/$f
|
||||
out="signed/stripped/$f"
|
||||
size=$( wc -c < $mine )
|
||||
# Step 1: Remove PE signature from signed binary
|
||||
osslsigncode remove-signature -in signed/$f -out $out > /dev/null 2>&1
|
||||
# Step 2: Remove checksum and padding from signed binary
|
||||
python3 <<EOF
|
||||
pe_file = "$out"
|
||||
size= $size
|
||||
with open(pe_file, "rb") as f:
|
||||
binary = bytearray(f.read())
|
||||
pe_offset = int.from_bytes(binary[0x3c:0x3c+4], byteorder="little")
|
||||
checksum_offset = pe_offset + 88
|
||||
for b in range(4):
|
||||
binary[checksum_offset + b] = 0
|
||||
l = len(binary)
|
||||
n = l - size
|
||||
if n > 0:
|
||||
if binary[-n:] != bytearray(n):
|
||||
print('expecting failure for', str(pe_file))
|
||||
binary = binary[:size]
|
||||
with open(pe_file, "wb") as f:
|
||||
f.write(binary)
|
||||
EOF
|
||||
chmod +x $out
|
||||
if cmp -s $out $mine; then
|
||||
echo "Success: $f"
|
||||
gpg --sign --armor --detach signed/$f
|
||||
else
|
||||
echo "Failure: $f"
|
||||
fi
|
||||
done
|
||||
@ -1,69 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
RED='\033[0;31m'
|
||||
BLUE='\033[0;34m'
|
||||
YELLOW='\033[0;33m'
|
||||
NC='\033[0m' # No Color
|
||||
function info {
|
||||
printf "\r💬 ${BLUE}INFO:${NC} ${1}\n"
|
||||
}
|
||||
function fail {
|
||||
printf "\r🗯 ${RED}ERROR:${NC} ${1}\n"
|
||||
exit 1
|
||||
}
|
||||
function warn {
|
||||
printf "\r⚠️ ${YELLOW}WARNING:${NC} ${1}\n"
|
||||
}
|
||||
|
||||
|
||||
# based on https://superuser.com/questions/497940/script-to-verify-a-signature-with-gpg
|
||||
function verify_signature() {
|
||||
local file=$1 keyring=$2 out=
|
||||
if out=$(gpg --no-default-keyring --keyring "$keyring" --status-fd 1 --verify "$file" 2>/dev/null) &&
|
||||
echo "$out" | grep -qs "^\[GNUPG:\] VALIDSIG "; then
|
||||
return 0
|
||||
else
|
||||
echo "$out" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function verify_hash() {
|
||||
local file=$1 expected_hash=$2
|
||||
actual_hash=$(sha256sum $file | awk '{print $1}')
|
||||
if [ "$actual_hash" == "$expected_hash" ]; then
|
||||
return 0
|
||||
else
|
||||
echo "$file $actual_hash (unexpected hash)" >&2
|
||||
rm "$file"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function download_if_not_exist() {
|
||||
local file_name=$1 url=$2
|
||||
if [ ! -e $file_name ] ; then
|
||||
wget -O $file_name "$url"
|
||||
fi
|
||||
}
|
||||
|
||||
# https://github.com/travis-ci/travis-build/blob/master/lib/travis/build/templates/header.sh
|
||||
function retry() {
|
||||
local result=0
|
||||
local count=1
|
||||
while [ $count -le 3 ]; do
|
||||
[ $result -ne 0 ] && {
|
||||
echo -e "\nThe command \"$@\" failed. Retrying, $count of 3.\n" >&2
|
||||
}
|
||||
! { "$@"; result=$?; }
|
||||
[ $result -eq 0 ] && break
|
||||
count=$(($count + 1))
|
||||
sleep 1
|
||||
done
|
||||
|
||||
[ $count -gt 3 ] && {
|
||||
echo -e "\nThe command \"$@\" failed 3 times.\n" >&2
|
||||
}
|
||||
|
||||
return $result
|
||||
}
|
||||
@ -1,28 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
here=$(dirname "$0")
|
||||
test -n "$here" -a -d "$here" || exit
|
||||
|
||||
cd ${here}/../..
|
||||
|
||||
git submodule init
|
||||
git submodule update
|
||||
|
||||
function get_git_mtime {
|
||||
if [ $# -eq 1 ]; then
|
||||
git log --pretty=%at -n1 -- $1
|
||||
else
|
||||
git log --pretty=%ar -n1 -- $2
|
||||
fi
|
||||
}
|
||||
|
||||
fail=0
|
||||
|
||||
|
||||
if [ $(date +%s -d "2 weeks ago") -gt $(get_git_mtime "contrib/deterministic-build/electrum-locale/") ]; then
|
||||
echo "Last update from electrum-locale is older than 2 weeks."\
|
||||
"Please update it to incorporate the latest translations from crowdin."
|
||||
fail=1
|
||||
fi
|
||||
|
||||
exit ${fail}
|
||||
@ -1 +0,0 @@
|
||||
Subproject commit ff5ad3a4436dddcc82799f8a91793013240c3b7b
|
||||
@ -1,41 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
import sys
|
||||
|
||||
try:
|
||||
import requests
|
||||
except ImportError as e:
|
||||
sys.exit(f"Error: {str(e)}. Try 'sudo python3 -m pip install <module-name>'")
|
||||
|
||||
|
||||
def check_restriction(p, r):
|
||||
# See: https://www.python.org/dev/peps/pep-0496/
|
||||
# Hopefully we don't need to parse the whole microlanguage
|
||||
if "extra" in r and "[" not in p:
|
||||
return False
|
||||
for marker in ["os_name", "platform_release", "sys_platform", "platform_system"]:
|
||||
if marker in r:
|
||||
return True
|
||||
|
||||
|
||||
for p in sys.stdin.read().split():
|
||||
p = p.strip()
|
||||
if not p:
|
||||
continue
|
||||
assert "==" in p, "This script expects a list of packages with pinned version, e.g. package==1.2.3, not {}".format(p)
|
||||
p, v = p.rsplit("==", 1)
|
||||
try:
|
||||
data = requests.get("https://pypi.org/pypi/{}/{}/json".format(p, v)).json()["info"]
|
||||
except ValueError:
|
||||
raise Exception("Package could not be found: {}=={}".format(p, v))
|
||||
try:
|
||||
for r in data["requires_dist"]:
|
||||
if ";" not in r:
|
||||
continue
|
||||
d, restricted = r.split(";", 1)
|
||||
if check_restriction(d, restricted):
|
||||
print(d, sep=" ")
|
||||
print("Installing {} from {} although it is only needed for {}".format(d, p, restricted), file=sys.stderr)
|
||||
except TypeError:
|
||||
# Has no dependencies at all
|
||||
continue
|
||||
|
||||
@ -1,56 +0,0 @@
|
||||
pip==19.0.1 \
|
||||
--hash=sha256:aae79c7afe895fb986ec751564f24d97df1331bb99cdfec6f70dada2f40c0044 \
|
||||
--hash=sha256:e81ddd35e361b630e94abeda4a1eddd36d47a90e71eb00f38f46b57f787cd1a5
|
||||
pycryptodomex==3.7.3 \
|
||||
--hash=sha256:0bda549e20db1eb8e29fb365d10acf84b224d813b1131c828fc830b2ce313dcd \
|
||||
--hash=sha256:1210c0818e5334237b16d99b5785aa0cee815d9997ee258bd5e2936af8e8aa50 \
|
||||
--hash=sha256:2090dc8cd7843eae75bd504b9be86792baa171fc5a758ea3f60188ab67ca95cf \
|
||||
--hash=sha256:22e6784b65dfdd357bf9a8a842db445192b227103e2c3137a28c489c46742135 \
|
||||
--hash=sha256:2edb8c3965a77e3092b5c5c1233ffd32de083f335202013f52d662404191ac79 \
|
||||
--hash=sha256:310fe269ac870135ff610d272e88dcb594ee58f40ac237a688d7c972cbca43e8 \
|
||||
--hash=sha256:456136b7d459f000794a67b23558351c72e21f0c2d4fcaa09fc99dae7844b0ef \
|
||||
--hash=sha256:463e49a9c5f1fa7bd36aff8debae0b5c487868c1fb66704529f2ad7e92f0cc9f \
|
||||
--hash=sha256:4a33b2828799ef8be789a462e6645ea6fe2c42b0df03e6763ccbfd1789c453e6 \
|
||||
--hash=sha256:5ff02dff1b03929e6339226b318aa59bd0b5c362f96e3e0eb7f3401d30594ed3 \
|
||||
--hash=sha256:6b1db8234b8ee2b30435d9e991389c2eeae4d45e09e471ffe757ba1dfae682bb \
|
||||
--hash=sha256:6eb67ee02de143cd19e36a52bd3869a9dc53e9184cd6bed5c39ff71dee2f6a45 \
|
||||
--hash=sha256:6f42eea5afc7eee29494fdfddc6bb7173953d4197d9200e4f67096c2a24bc21b \
|
||||
--hash=sha256:87bc8082e2de2247df7d0b161234f8edb1384294362cc0c8db9324463097578b \
|
||||
--hash=sha256:8df93d34bc0e3a28a27652070164683a07d8a50c628119d6e0f7710f4d01b42f \
|
||||
--hash=sha256:989952c39e8fef1c959f0a0f85656e29c41c01162e33a3f5fd8ce71e47262ae9 \
|
||||
--hash=sha256:a4a203077e2f312ec8677dde80a5c4e6fe5a82a46173a8edc8da668602a3e073 \
|
||||
--hash=sha256:a793c1242dffd39f585ae356344e8935d30f01f6be7d4c62ffc87af376a2f5f9 \
|
||||
--hash=sha256:b70fe991564e178af02ccf89435a8f9e8d052707a7c4b95bf6027cb785da3175 \
|
||||
--hash=sha256:b83594196e3661cb78c97b80a62fbfbba2add459dfd532b58e7a7c62dd06aab4 \
|
||||
--hash=sha256:ba27725237d0a3ea66ec2b6b387259471840908836711a3b215160808dffed0f \
|
||||
--hash=sha256:d1ab8ad1113cdc553ca50c4d5f0142198c317497364c0c70443d69f7ad1c9288 \
|
||||
--hash=sha256:dce039a8a8a318d7af83cae3fd08d58cefd2120075dfac0ae14d706974040f63 \
|
||||
--hash=sha256:e3213037ea33c85ab705579268cbc8a4433357e9fb99ec7ce9fdcc4d4eec1d50 \
|
||||
--hash=sha256:ec8d8023d31ef72026d46e9fb301ff8759eff5336bcf3d1510836375f53f96a9 \
|
||||
--hash=sha256:ece65730d50aa57a1330d86d81582a2d1587b2ca51cb34f586da8551ddc68fee \
|
||||
--hash=sha256:ed21fc515e224727793e4cc3fb3d00f33f59e3a167d3ad6ac1475ab3b05c2f9e \
|
||||
--hash=sha256:eec1132d878153d61a05424f35f089f951bd6095a4f6c60bdd2ef8919d44425e
|
||||
PyQt5==5.11.3 \
|
||||
--hash=sha256:517e4339135c4874b799af0d484bc2e8c27b54850113a68eec40a0b56534f450 \
|
||||
--hash=sha256:ac1eb5a114b6e7788e8be378be41c5e54b17d5158994504e85e43b5fca006a39 \
|
||||
--hash=sha256:d2309296a5a79d0a1c0e6c387c30f0398b65523a6dcc8a19cc172e46b949e00d \
|
||||
--hash=sha256:e85936bae1581bcb908847d2038e5b34237a5e6acc03130099a78930770e7ead
|
||||
PyQt5-sip==4.19.13 \
|
||||
--hash=sha256:125f77c087572c9272219cda030a63c2f996b8507592b2a54d7ef9b75f9f054d \
|
||||
--hash=sha256:14c37b06e3fb7c2234cb208fa461ec4e62b4ba6d8b32ca3753c0b2cfd61b00e3 \
|
||||
--hash=sha256:1cb2cf52979f9085fc0eab7e0b2438eb4430d4aea8edec89762527e17317175b \
|
||||
--hash=sha256:4babef08bccbf223ec34464e1ed0a23caeaeea390ca9a3529227d9a57f0d6ee4 \
|
||||
--hash=sha256:53cb9c1208511cda0b9ed11cffee992a5a2f5d96eb88722569b2ce65ecf6b960 \
|
||||
--hash=sha256:549449d9461d6c665cbe8af4a3808805c5e6e037cd2ce4fd93308d44a049bfac \
|
||||
--hash=sha256:5f5b3089b200ff33de3f636b398e7199b57a6b5c1bb724bdb884580a072a14b5 \
|
||||
--hash=sha256:a4d9bf6e1fa2dd6e73f1873f1a47cee11a6ba0cf9ba8cf7002b28c76823600d0 \
|
||||
--hash=sha256:a4ee6026216f1fbe25c8847f9e0fbce907df5b908f84816e21af16ec7666e6fe \
|
||||
--hash=sha256:a91a308a5e0cc99de1e97afd8f09f46dd7ca20cfaa5890ef254113eebaa1adff \
|
||||
--hash=sha256:b0342540da479d2713edc68fb21f307473f68da896ad5c04215dae97630e0069 \
|
||||
--hash=sha256:f997e21b4e26a3397cb7b255b8d1db5b9772c8e0c94b6d870a5a0ab5c27eacaa
|
||||
setuptools==40.8.0 \
|
||||
--hash=sha256:6e4eec90337e849ade7103723b9a99631c1f0d19990d6e8412dc42f5ae8b304d \
|
||||
--hash=sha256:e8496c0079f3ac30052ffe69b679bd876c5265686127a3159cfa415669b7f9ab
|
||||
wheel==0.32.3 \
|
||||
--hash=sha256:029703bf514e16c8271c3821806a1c171220cc5bdd325cbf4e7da1e056a01db6 \
|
||||
--hash=sha256:1e53cdb3f808d5ccd0df57f964263752aa74ea7359526d3da6c02114ec1e1d44
|
||||
@ -1,134 +0,0 @@
|
||||
btchip-python==0.1.28 \
|
||||
--hash=sha256:da09d0d7a6180d428833795ea9a233c3b317ddfcccea8cc6f0eba59435e5dd83
|
||||
certifi==2018.11.29 \
|
||||
--hash=sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7 \
|
||||
--hash=sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033
|
||||
chardet==3.0.4 \
|
||||
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
|
||||
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
|
||||
ckcc-protocol==0.7.2 \
|
||||
--hash=sha256:31ee5178cfba8895eb2a6b8d06dc7830b51461a0ff767a670a64707c63e6b264 \
|
||||
--hash=sha256:498db4ccdda018cd9f40210f5bd02ddcc98e7df583170b2eab4035c86c3cc03b
|
||||
click==7.0 \
|
||||
--hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \
|
||||
--hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7
|
||||
construct==2.9.45 \
|
||||
--hash=sha256:2271a0efd0798679dea825ff47e22a4c550456a5db0ba8baa82f7eae0af0118c
|
||||
Cython==0.29.4 \
|
||||
--hash=sha256:004eeb2fc64e9db4a3bc0d65583d69769c7242d29d9335121cbab776688dc122 \
|
||||
--hash=sha256:028ee8571884a129e0d5c4d48296f6b3ea679668c096bb65fe8b2ff7ac29d707 \
|
||||
--hash=sha256:162b8b794ca9210c7039d54b6d96cd342e0404e41e7e467baae69f0252d7e52a \
|
||||
--hash=sha256:1aba4cf581d203e8fa3b6a7b432b09416e4f93c0d1f7744834acacfe3e9db424 \
|
||||
--hash=sha256:1be8f08c87b92a880f2fd19f93293e738ca8647834ad05625635320cec9ecad4 \
|
||||
--hash=sha256:21c707a811912aeb65abe8a66e5adebc759889661c8f4cf677523cd33c609084 \
|
||||
--hash=sha256:234de250ef09ba667fc6a8f6ba07712d3fe5bb8d92d70d2b958d4c56e3172c4a \
|
||||
--hash=sha256:33dad82003df518e1242ac3b0592fc63c49d65d0d37b696cb43b7d35085e6bd5 \
|
||||
--hash=sha256:54ee6cbc1397b27670e598ae15cab36e826a01605f63bf267a5fd2642bd8a147 \
|
||||
--hash=sha256:6058c57657d2704c9fad8a56458173d2f525dce4083ca46e9b99b1b35da2b27f \
|
||||
--hash=sha256:6d3065f39ea1354eba4807e2752e97d57f26d6f68bc4a4c561264ca4300c46cb \
|
||||
--hash=sha256:7059e5acac1d7a82e75e553924d9ea59b0e79203adf903cb999287fbcc8f50f1 \
|
||||
--hash=sha256:71c31e01f20a3a7273f6f38760d29170ee89e895be540481130cb173ef6b7246 \
|
||||
--hash=sha256:89225447801e8bd0f6d8e2c0807ded83af8ad7bf4086b5ecf1f22c5a68d1b3e3 \
|
||||
--hash=sha256:9783f11fe4a4af66b0aa0da68fda833c10b95edd9099a6dbe710d03bcb96adf2 \
|
||||
--hash=sha256:9a0be0aac30d71fe490a2b0377fca6e13a5242ecc01d09c7a358f1f2fcb07a80 \
|
||||
--hash=sha256:9a2cccc26dcf2df1e0048cdf63bd714f1d5dfad457f03b9938c5cc3eef74c9ab \
|
||||
--hash=sha256:b0889310f8558eb406a4a853d63553b90c621476f1b5b80b46b1ff57eef198cf \
|
||||
--hash=sha256:c46ef7b771c88512435399e5ffbc3a70079d4945123d6fbfc6211b4cfdc4e546 \
|
||||
--hash=sha256:c71a77c1047d65e5b4e614053cbb7b567c36359b2bc1d27fba23b984ab6dddd0 \
|
||||
--hash=sha256:c9361811a1a49db11efce54fedd01a5544af8db074fce471c720bdb85ec9c7a8 \
|
||||
--hash=sha256:d021a8326a1d2cdb182b0dd7f49bb42d8a4e6ddfb3c8d388ee5be26d57d49f3b \
|
||||
--hash=sha256:d1ee3d39c73a094ae5b6e2f9263ae0dc61af1b549a0869ade8c3c30325ed9f26 \
|
||||
--hash=sha256:d49d7cf82192edc6e386262a07ceb3515028afbd9009dd8ec669d2c0a9f20128 \
|
||||
--hash=sha256:dc5fc1fa072a98f152e46465aaf3e02b3ea36a9d3b8c79bfabd47b0e3ad9226c \
|
||||
--hash=sha256:e290fed7fe73860657af564e596fff87e75cfda861c067e89212970a47826cc6 \
|
||||
--hash=sha256:fcf9a9a566ab98495db641eefee471eb03df71e394ee51fdfa9b4c0b9f6928eb \
|
||||
--hash=sha256:fe8c1d2538867bf2753988a4a2d548bcb211fcbba125aa3e9092391b16f47b56
|
||||
ecdsa==0.13 \
|
||||
--hash=sha256:40d002cf360d0e035cf2cb985e1308d41aaa087cbfc135b2dc2d844296ea546c \
|
||||
--hash=sha256:64cf1ee26d1cde3c73c6d7d107f835fed7c6a2904aef9eac223d57ad800c43fa
|
||||
hidapi==0.7.99.post21 \
|
||||
--hash=sha256:1ac170f4d601c340f2cd52fd06e85c5e77bad7ceac811a7bb54b529f7dc28c24 \
|
||||
--hash=sha256:6424ad75da0021ce8c1bcd78056a04adada303eff3c561f8d132b85d0a914cb3 \
|
||||
--hash=sha256:8d3be666f464347022e2b47caf9132287885d9eacc7895314fc8fefcb4e42946 \
|
||||
--hash=sha256:92878bad7324dee619b7832fbfc60b5360d378aa7c5addbfef0a410d8fd342c7 \
|
||||
--hash=sha256:b4b1f6aff0192e9be153fe07c1b7576cb7a1ff52e78e3f76d867be95301a8e87 \
|
||||
--hash=sha256:bf03f06f586ce7d8aeb697a94b7dba12dc9271aae92d7a8d4486360ff711a660 \
|
||||
--hash=sha256:c76de162937326fcd57aa399f94939ce726242323e65c15c67e183da1f6c26f7 \
|
||||
--hash=sha256:d4ad1e46aef98783a9e6274d523b8b1e766acfc3d72828cd44a337564d984cfa \
|
||||
--hash=sha256:d4b5787a04613503357606bb10e59c3e2c1114fa00ee328b838dd257f41cbd7b \
|
||||
--hash=sha256:e0be1aa6566979266a8fc845ab0e18613f4918cf2c977fe67050f5dc7e2a9a97 \
|
||||
--hash=sha256:edfb16b16a298717cf05b8c8a9ad1828b6ff3de5e93048ceccd74e6ae4ff0922
|
||||
idna==2.8 \
|
||||
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
|
||||
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
|
||||
keepkey==6.0.2 \
|
||||
--hash=sha256:3236dd701bde74768c41a92e724e322ea5e01b90985e2e6215eb85b77f9a0ae1 \
|
||||
--hash=sha256:677e07deacc2ff97bee313b8dd7ae55faebab02e7d17b9a8e49b889996a36010 \
|
||||
--hash=sha256:af107f610fb0e2417fc7a9d87a2fa22aac9b80b79559370d178be424bb85489a
|
||||
libusb1==1.7 \
|
||||
--hash=sha256:9d4f66d2ed699986b06bc3082cd262101cb26af7a76a34bd15b7eb56cba37e0f
|
||||
mnemonic==0.18 \
|
||||
--hash=sha256:02a7306a792370f4a0c106c2cf1ce5a0c84b9dbd7e71c6792fdb9ad88a727f1d
|
||||
pbkdf2==1.3 \
|
||||
--hash=sha256:ac6397369f128212c43064a2b4878038dab78dab41875364554aaf2a684e6979
|
||||
pip==19.0.1 \
|
||||
--hash=sha256:aae79c7afe895fb986ec751564f24d97df1331bb99cdfec6f70dada2f40c0044 \
|
||||
--hash=sha256:e81ddd35e361b630e94abeda4a1eddd36d47a90e71eb00f38f46b57f787cd1a5
|
||||
protobuf==3.6.1 \
|
||||
--hash=sha256:10394a4d03af7060fa8a6e1cbf38cea44be1467053b0aea5bbfcb4b13c4b88c4 \
|
||||
--hash=sha256:1489b376b0f364bcc6f89519718c057eb191d7ad6f1b395ffd93d1aa45587811 \
|
||||
--hash=sha256:1931d8efce896981fe410c802fd66df14f9f429c32a72dd9cfeeac9815ec6444 \
|
||||
--hash=sha256:196d3a80f93c537f27d2a19a4fafb826fb4c331b0b99110f985119391d170f96 \
|
||||
--hash=sha256:46e34fdcc2b1f2620172d3a4885128705a4e658b9b62355ae5e98f9ea19f42c2 \
|
||||
--hash=sha256:4b92e235a3afd42e7493b281c8b80c0c65cbef45de30f43d571d1ee40a1f77ef \
|
||||
--hash=sha256:574085a33ca0d2c67433e5f3e9a0965c487410d6cb3406c83bdaf549bfc2992e \
|
||||
--hash=sha256:59cd75ded98094d3cf2d79e84cdb38a46e33e7441b2826f3838dcc7c07f82995 \
|
||||
--hash=sha256:5ee0522eed6680bb5bac5b6d738f7b0923b3cafce8c4b1a039a6107f0841d7ed \
|
||||
--hash=sha256:65917cfd5da9dfc993d5684643063318a2e875f798047911a9dd71ca066641c9 \
|
||||
--hash=sha256:685bc4ec61a50f7360c9fd18e277b65db90105adbf9c79938bd315435e526b90 \
|
||||
--hash=sha256:92e8418976e52201364a3174e40dc31f5fd8c147186d72380cbda54e0464ee19 \
|
||||
--hash=sha256:9335f79d1940dfb9bcaf8ec881fb8ab47d7a2c721fb8b02949aab8bbf8b68625 \
|
||||
--hash=sha256:a7ee3bb6de78185e5411487bef8bc1c59ebd97e47713cba3c460ef44e99b3db9 \
|
||||
--hash=sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e \
|
||||
--hash=sha256:e7a5ccf56444211d79e3204b05087c1460c212a2c7d62f948b996660d0165d68 \
|
||||
--hash=sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10
|
||||
pyaes==1.6.1 \
|
||||
--hash=sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f
|
||||
pyblake2==1.1.2 \
|
||||
--hash=sha256:3757f7ad709b0e1b2a6b3919fa79fe3261f166fc375cd521f2be480f8319dde9 \
|
||||
--hash=sha256:407e02c7f8f36fcec1b7aa114ddca0c1060c598142ea6f6759d03710b946a7e3 \
|
||||
--hash=sha256:4d47b4a2c1d292b1e460bde1dda4d13aa792ed2ed70fcc263b6bc24632c8e902 \
|
||||
--hash=sha256:5ccc7eb02edb82fafb8adbb90746af71460fbc29aa0f822526fc976dff83e93f \
|
||||
--hash=sha256:8043267fbc0b2f3748c6920591cd0b8b5609dcce60c504c32858aa36206386f2 \
|
||||
--hash=sha256:982295a87907d50f4723db6bc724660da76b6547826d52160171d54f95b919ac \
|
||||
--hash=sha256:baa2190bfe549e36163aa44664d4ee3a9080b236fc5d42f50dc6fd36bbdc749e \
|
||||
--hash=sha256:c53417ee0bbe77db852d5fd1036749f03696ebc2265de359fe17418d800196c4 \
|
||||
--hash=sha256:fbc9fcde75713930bc2a91b149e97be2401f7c9c56d735b46a109210f58d7358
|
||||
requests==2.21.0 \
|
||||
--hash=sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e \
|
||||
--hash=sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b
|
||||
safet==0.1.4 \
|
||||
--hash=sha256:522c257910f9472e9c77c487425ed286f6721c314653e232bc41c6cedece1bb1 \
|
||||
--hash=sha256:b152874acdc89ff0c8b2d680bfbf020b3e53527c2ad3404489dd61a548aa56a1
|
||||
setuptools==40.8.0 \
|
||||
--hash=sha256:6e4eec90337e849ade7103723b9a99631c1f0d19990d6e8412dc42f5ae8b304d \
|
||||
--hash=sha256:e8496c0079f3ac30052ffe69b679bd876c5265686127a3159cfa415669b7f9ab
|
||||
six==1.12.0 \
|
||||
--hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
|
||||
--hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73
|
||||
trezor==0.11.1 \
|
||||
--hash=sha256:6043f321d856e1b45b9df0c37810264f08d065bb56cd999f61a05fe2906e9e18 \
|
||||
--hash=sha256:6119b30cf9a136667753935bd06c5f341e78950b35e8ccbadaecc65c12f1946d
|
||||
typing-extensions==3.7.2 \
|
||||
--hash=sha256:07b2c978670896022a43c4b915df8958bec4a6b84add7f2c87b2b728bda3ba64 \
|
||||
--hash=sha256:f3f0e67e1d42de47b5c67c32c9b26641642e9170fe7e292991793705cd5fef7c \
|
||||
--hash=sha256:fb2cd053238d33a8ec939190f30cfd736c00653a85a2919415cecf7dc3d9da71
|
||||
urllib3==1.24.1 \
|
||||
--hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \
|
||||
--hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22
|
||||
websocket-client==0.54.0 \
|
||||
--hash=sha256:8c8bf2d4f800c3ed952df206b18c28f7070d9e3dcbd6ca6291127574f57ee786 \
|
||||
--hash=sha256:e51562c91ddb8148e791f0155fdb01325d99bb52c4cdbb291aee7a3563fd0849
|
||||
wheel==0.32.3 \
|
||||
--hash=sha256:029703bf514e16c8271c3821806a1c171220cc5bdd325cbf4e7da1e056a01db6 \
|
||||
--hash=sha256:1e53cdb3f808d5ccd0df57f964263752aa74ea7359526d3da6c02114ec1e1d44
|
||||
@ -1,155 +0,0 @@
|
||||
aiohttp==3.5.4 \
|
||||
--hash=sha256:00d198585474299c9c3b4f1d5de1a576cc230d562abc5e4a0e81d71a20a6ca55 \
|
||||
--hash=sha256:0155af66de8c21b8dba4992aaeeabf55503caefae00067a3b1139f86d0ec50ed \
|
||||
--hash=sha256:09654a9eca62d1bd6d64aa44db2498f60a5c1e0ac4750953fdd79d5c88955e10 \
|
||||
--hash=sha256:199f1d106e2b44b6dacdf6f9245493c7d716b01d0b7fbe1959318ba4dc64d1f5 \
|
||||
--hash=sha256:296f30dedc9f4b9e7a301e5cc963012264112d78a1d3094cd83ef148fdf33ca1 \
|
||||
--hash=sha256:368ed312550bd663ce84dc4b032a962fcb3c7cae099dbbd48663afc305e3b939 \
|
||||
--hash=sha256:40d7ea570b88db017c51392349cf99b7aefaaddd19d2c78368aeb0bddde9d390 \
|
||||
--hash=sha256:629102a193162e37102c50713e2e31dc9a2fe7ac5e481da83e5bb3c0cee700aa \
|
||||
--hash=sha256:6d5ec9b8948c3d957e75ea14d41e9330e1ac3fed24ec53766c780f82805140dc \
|
||||
--hash=sha256:87331d1d6810214085a50749160196391a712a13336cd02ce1c3ea3d05bcf8d5 \
|
||||
--hash=sha256:9a02a04bbe581c8605ac423ba3a74999ec9d8bce7ae37977a3d38680f5780b6d \
|
||||
--hash=sha256:9c4c83f4fa1938377da32bc2d59379025ceeee8e24b89f72fcbccd8ca22dc9bf \
|
||||
--hash=sha256:9cddaff94c0135ee627213ac6ca6d05724bfe6e7a356e5e09ec57bd3249510f6 \
|
||||
--hash=sha256:a25237abf327530d9561ef751eef9511ab56fd9431023ca6f4803f1994104d72 \
|
||||
--hash=sha256:a5cbd7157b0e383738b8e29d6e556fde8726823dae0e348952a61742b21aeb12 \
|
||||
--hash=sha256:a97a516e02b726e089cffcde2eea0d3258450389bbac48cbe89e0f0b6e7b0366 \
|
||||
--hash=sha256:acc89b29b5f4e2332d65cd1b7d10c609a75b88ef8925d487a611ca788432dfa4 \
|
||||
--hash=sha256:b05bd85cc99b06740aad3629c2585bda7b83bd86e080b44ba47faf905fdf1300 \
|
||||
--hash=sha256:c2bec436a2b5dafe5eaeb297c03711074d46b6eb236d002c13c42f25c4a8ce9d \
|
||||
--hash=sha256:cc619d974c8c11fe84527e4b5e1c07238799a8c29ea1c1285149170524ba9303 \
|
||||
--hash=sha256:d4392defd4648badaa42b3e101080ae3313e8f4787cb517efd3f5b8157eaefd6 \
|
||||
--hash=sha256:e1c3c582ee11af7f63a34a46f0448fca58e59889396ffdae1f482085061a2889
|
||||
aiohttp-socks==0.2.2 \
|
||||
--hash=sha256:e473ee222b001fe33798957b9ce3352b32c187cf41684f8e2259427925914993 \
|
||||
--hash=sha256:eebd8939a7c3c1e3e7e1b2552c60039b4c65ef6b8b2351efcbdd98290538e310
|
||||
aiorpcX==0.10.4 \
|
||||
--hash=sha256:7130105d31230f069b0eea4e1893c7199cfe2d89a52a31aec718d37f4449935d \
|
||||
--hash=sha256:e6dfd584f597ee3aa6a8d4cb5755c8ffbbe42754f32728561d9e5940379d5096
|
||||
async_timeout==3.0.1 \
|
||||
--hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \
|
||||
--hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3
|
||||
attrs==18.2.0 \
|
||||
--hash=sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69 \
|
||||
--hash=sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb
|
||||
certifi==2018.11.29 \
|
||||
--hash=sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7 \
|
||||
--hash=sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033
|
||||
chardet==3.0.4 \
|
||||
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
|
||||
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
|
||||
dnspython==1.16.0 \
|
||||
--hash=sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01 \
|
||||
--hash=sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d
|
||||
ecdsa==0.13 \
|
||||
--hash=sha256:40d002cf360d0e035cf2cb985e1308d41aaa087cbfc135b2dc2d844296ea546c \
|
||||
--hash=sha256:64cf1ee26d1cde3c73c6d7d107f835fed7c6a2904aef9eac223d57ad800c43fa
|
||||
idna==2.8 \
|
||||
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
|
||||
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
|
||||
idna_ssl==1.1.0 \
|
||||
--hash=sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c
|
||||
jsonrpclib-pelix==0.4.0 \
|
||||
--hash=sha256:19c558e169a51480b39548783067ca55046b62b2409ab4559931255e12f635de \
|
||||
--hash=sha256:a966d17f2f739ee89031cf5c807d85d92db6b2715fb2b2f8a88bbfc87f468b12
|
||||
multidict==4.5.2 \
|
||||
--hash=sha256:024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f \
|
||||
--hash=sha256:041e9442b11409be5e4fc8b6a97e4bcead758ab1e11768d1e69160bdde18acc3 \
|
||||
--hash=sha256:045b4dd0e5f6121e6f314d81759abd2c257db4634260abcfe0d3f7083c4908ef \
|
||||
--hash=sha256:047c0a04e382ef8bd74b0de01407e8d8632d7d1b4db6f2561106af812a68741b \
|
||||
--hash=sha256:068167c2d7bbeebd359665ac4fff756be5ffac9cda02375b5c5a7c4777038e73 \
|
||||
--hash=sha256:148ff60e0fffa2f5fad2eb25aae7bef23d8f3b8bdaf947a65cdbe84a978092bc \
|
||||
--hash=sha256:1d1c77013a259971a72ddaa83b9f42c80a93ff12df6a4723be99d858fa30bee3 \
|
||||
--hash=sha256:1d48bc124a6b7a55006d97917f695effa9725d05abe8ee78fd60d6588b8344cd \
|
||||
--hash=sha256:31dfa2fc323097f8ad7acd41aa38d7c614dd1960ac6681745b6da124093dc351 \
|
||||
--hash=sha256:34f82db7f80c49f38b032c5abb605c458bac997a6c3142e0d6c130be6fb2b941 \
|
||||
--hash=sha256:3d5dd8e5998fb4ace04789d1d008e2bb532de501218519d70bb672c4c5a2fc5d \
|
||||
--hash=sha256:4a6ae52bd3ee41ee0f3acf4c60ceb3f44e0e3bc52ab7da1c2b2aa6703363a3d1 \
|
||||
--hash=sha256:4b02a3b2a2f01d0490dd39321c74273fed0568568ea0e7ea23e02bd1fb10a10b \
|
||||
--hash=sha256:4b843f8e1dd6a3195679d9838eb4670222e8b8d01bc36c9894d6c3538316fa0a \
|
||||
--hash=sha256:5de53a28f40ef3c4fd57aeab6b590c2c663de87a5af76136ced519923d3efbb3 \
|
||||
--hash=sha256:61b2b33ede821b94fa99ce0b09c9ece049c7067a33b279f343adfe35108a4ea7 \
|
||||
--hash=sha256:6a3a9b0f45fd75dc05d8e93dc21b18fc1670135ec9544d1ad4acbcf6b86781d0 \
|
||||
--hash=sha256:76ad8e4c69dadbb31bad17c16baee61c0d1a4a73bed2590b741b2e1a46d3edd0 \
|
||||
--hash=sha256:7ba19b777dc00194d1b473180d4ca89a054dd18de27d0ee2e42a103ec9b7d014 \
|
||||
--hash=sha256:7c1b7eab7a49aa96f3db1f716f0113a8a2e93c7375dd3d5d21c4941f1405c9c5 \
|
||||
--hash=sha256:7fc0eee3046041387cbace9314926aa48b681202f8897f8bff3809967a049036 \
|
||||
--hash=sha256:8ccd1c5fff1aa1427100ce188557fc31f1e0a383ad8ec42c559aabd4ff08802d \
|
||||
--hash=sha256:8e08dd76de80539d613654915a2f5196dbccc67448df291e69a88712ea21e24a \
|
||||
--hash=sha256:c18498c50c59263841862ea0501da9f2b3659c00db54abfbf823a80787fde8ce \
|
||||
--hash=sha256:c49db89d602c24928e68c0d510f4fcf8989d77defd01c973d6cbe27e684833b1 \
|
||||
--hash=sha256:ce20044d0317649ddbb4e54dab3c1bcc7483c78c27d3f58ab3d0c7e6bc60d26a \
|
||||
--hash=sha256:d1071414dd06ca2eafa90c85a079169bfeb0e5f57fd0b45d44c092546fcd6fd9 \
|
||||
--hash=sha256:d3be11ac43ab1a3e979dac80843b42226d5d3cccd3986f2e03152720a4297cd7 \
|
||||
--hash=sha256:db603a1c235d110c860d5f39988ebc8218ee028f07a7cbc056ba6424372ca31b
|
||||
pip==19.0.1 \
|
||||
--hash=sha256:aae79c7afe895fb986ec751564f24d97df1331bb99cdfec6f70dada2f40c0044 \
|
||||
--hash=sha256:e81ddd35e361b630e94abeda4a1eddd36d47a90e71eb00f38f46b57f787cd1a5
|
||||
protobuf==3.6.1 \
|
||||
--hash=sha256:10394a4d03af7060fa8a6e1cbf38cea44be1467053b0aea5bbfcb4b13c4b88c4 \
|
||||
--hash=sha256:1489b376b0f364bcc6f89519718c057eb191d7ad6f1b395ffd93d1aa45587811 \
|
||||
--hash=sha256:1931d8efce896981fe410c802fd66df14f9f429c32a72dd9cfeeac9815ec6444 \
|
||||
--hash=sha256:196d3a80f93c537f27d2a19a4fafb826fb4c331b0b99110f985119391d170f96 \
|
||||
--hash=sha256:46e34fdcc2b1f2620172d3a4885128705a4e658b9b62355ae5e98f9ea19f42c2 \
|
||||
--hash=sha256:4b92e235a3afd42e7493b281c8b80c0c65cbef45de30f43d571d1ee40a1f77ef \
|
||||
--hash=sha256:574085a33ca0d2c67433e5f3e9a0965c487410d6cb3406c83bdaf549bfc2992e \
|
||||
--hash=sha256:59cd75ded98094d3cf2d79e84cdb38a46e33e7441b2826f3838dcc7c07f82995 \
|
||||
--hash=sha256:5ee0522eed6680bb5bac5b6d738f7b0923b3cafce8c4b1a039a6107f0841d7ed \
|
||||
--hash=sha256:65917cfd5da9dfc993d5684643063318a2e875f798047911a9dd71ca066641c9 \
|
||||
--hash=sha256:685bc4ec61a50f7360c9fd18e277b65db90105adbf9c79938bd315435e526b90 \
|
||||
--hash=sha256:92e8418976e52201364a3174e40dc31f5fd8c147186d72380cbda54e0464ee19 \
|
||||
--hash=sha256:9335f79d1940dfb9bcaf8ec881fb8ab47d7a2c721fb8b02949aab8bbf8b68625 \
|
||||
--hash=sha256:a7ee3bb6de78185e5411487bef8bc1c59ebd97e47713cba3c460ef44e99b3db9 \
|
||||
--hash=sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e \
|
||||
--hash=sha256:e7a5ccf56444211d79e3204b05087c1460c212a2c7d62f948b996660d0165d68 \
|
||||
--hash=sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10
|
||||
pyaes==1.6.1 \
|
||||
--hash=sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f
|
||||
QDarkStyle==2.5.4 \
|
||||
--hash=sha256:3eb60922b8c4d9cedecb6897ca4c9f8a259d81bdefe5791976ccdf12432de1f0 \
|
||||
--hash=sha256:51331fc6490b38c376e6ba8d8c814320c8d2d1c2663055bc396321a7c28fa8be
|
||||
qrcode==6.1 \
|
||||
--hash=sha256:3996ee560fc39532910603704c82980ff6d4d5d629f9c3f25f34174ce8606cf5 \
|
||||
--hash=sha256:505253854f607f2abf4d16092c61d4e9d511a3b4392e60bff957a68592b04369
|
||||
setuptools==40.8.0 \
|
||||
--hash=sha256:6e4eec90337e849ade7103723b9a99631c1f0d19990d6e8412dc42f5ae8b304d \
|
||||
--hash=sha256:e8496c0079f3ac30052ffe69b679bd876c5265686127a3159cfa415669b7f9ab
|
||||
six==1.12.0 \
|
||||
--hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
|
||||
--hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73
|
||||
typing-extensions==3.7.2 \
|
||||
--hash=sha256:07b2c978670896022a43c4b915df8958bec4a6b84add7f2c87b2b728bda3ba64 \
|
||||
--hash=sha256:f3f0e67e1d42de47b5c67c32c9b26641642e9170fe7e292991793705cd5fef7c \
|
||||
--hash=sha256:fb2cd053238d33a8ec939190f30cfd736c00653a85a2919415cecf7dc3d9da71
|
||||
wheel==0.32.3 \
|
||||
--hash=sha256:029703bf514e16c8271c3821806a1c171220cc5bdd325cbf4e7da1e056a01db6 \
|
||||
--hash=sha256:1e53cdb3f808d5ccd0df57f964263752aa74ea7359526d3da6c02114ec1e1d44
|
||||
yarl==1.3.0 \
|
||||
--hash=sha256:024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9 \
|
||||
--hash=sha256:2f3010703295fbe1aec51023740871e64bb9664c789cba5a6bdf404e93f7568f \
|
||||
--hash=sha256:3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb \
|
||||
--hash=sha256:3e2724eb9af5dc41648e5bb304fcf4891adc33258c6e14e2a7414ea32541e320 \
|
||||
--hash=sha256:5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842 \
|
||||
--hash=sha256:73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0 \
|
||||
--hash=sha256:7ab825726f2940c16d92aaec7d204cfc34ac26c0040da727cf8ba87255a33829 \
|
||||
--hash=sha256:b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310 \
|
||||
--hash=sha256:c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4 \
|
||||
--hash=sha256:c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8 \
|
||||
--hash=sha256:e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1
|
||||
colorama==0.4.1 \
|
||||
--hash=sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d \
|
||||
--hash=sha256:f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48
|
||||
pylibscrypt==1.7.1 \
|
||||
--hash=sha256:7aa9424e211a12106c67ea884ccfe609856289372b900d3702faaf66e87f79ac
|
||||
scrypt==0.8.6 \
|
||||
--hash=sha256:85919f023148cd9fb01d75ad4e3e061928c298fa6249a0cd6cd469c4b947595e \
|
||||
--hash=sha256:4ad7188f2e42dbee2ff1cd72e3da40b170ba41847effbf0d726444f62ae60f3a \
|
||||
--hash=sha256:bc131f74a688fa09993c518ca666a2ebd4268b207e039cbab03a034228140d3e \
|
||||
--hash=sha256:232acdbc3434d2de55def8d5dbf1bc4b9bfc50da7c5741df2a6eebc4e18d3720 \
|
||||
--hash=sha256:971db040d3963ebe4b919a203fe10d7d6659951d3644066314330983dc175ed4 \
|
||||
--hash=sha256:475ac80239b3d788ae71a09c3019ca915e149aaa339adcdd1c9eef121293dc88 \
|
||||
--hash=sha256:18ccbc63d87c6f89b753194194bb37aeaf1abc517e4b989461d115c1d93ce128 \
|
||||
--hash=sha256:c23daecee405cb036845917295c76f8d747fc890158df40cb304b4b3c3640079 \
|
||||
--hash=sha256:f8239b2d47fa1d40bc27efd231dc7083695d10c1c2ac51a99380360741e0362d
|
||||
|
||||
@ -1,41 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Run this after a new release to update dependencies
|
||||
|
||||
set -e
|
||||
|
||||
venv_dir=~/.electrum-venv
|
||||
contrib=$(dirname "$0")
|
||||
|
||||
which virtualenv > /dev/null 2>&1 || { echo "Please install virtualenv" && exit 1; }
|
||||
python3 -m hashin -h > /dev/null 2>&1 || { python3 -m pip install hashin; }
|
||||
other_python=$(which python3)
|
||||
|
||||
for i in '' '-hw' '-binaries'; do
|
||||
rm -rf "$venv_dir"
|
||||
virtualenv -p $(which python3) $venv_dir
|
||||
|
||||
source $venv_dir/bin/activate
|
||||
|
||||
echo "Installing $m dependencies"
|
||||
|
||||
python -m pip install -r $contrib/requirements/requirements${i}.txt --upgrade
|
||||
|
||||
echo "OK."
|
||||
|
||||
requirements=$(pip freeze --all)
|
||||
restricted=$(echo $requirements | $other_python $contrib/deterministic-build/find_restricted_dependencies.py)
|
||||
requirements="$requirements $restricted"
|
||||
|
||||
echo "Generating package hashes..."
|
||||
rm $contrib/deterministic-build/requirements${i}.txt
|
||||
touch $contrib/deterministic-build/requirements${i}.txt
|
||||
|
||||
for requirement in $requirements; do
|
||||
echo -e "\r Hashing $requirement..."
|
||||
$other_python -m hashin -r $contrib/deterministic-build/requirements${i}.txt ${requirement}
|
||||
done
|
||||
|
||||
echo "OK."
|
||||
done
|
||||
|
||||
echo "Done. Updated requirements"
|
||||
@ -1,36 +1,7 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
CONTRIB="$(dirname "$(readlink -e "$0")")"
|
||||
ROOT_FOLDER="$CONTRIB"/..
|
||||
PACKAGES="$ROOT_FOLDER"/packages/
|
||||
LOCALE="$ROOT_FOLDER"/electrum/locale/
|
||||
|
||||
if [ ! -d "$LOCALE" ]; then
|
||||
echo "Run make_locale first!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d "$PACKAGES" ]; then
|
||||
echo "Run make_packages first!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
pushd ./electrum/gui/kivy/
|
||||
|
||||
make theming
|
||||
|
||||
if [[ -n "$1" && "$1" == "release" ]] ; then
|
||||
echo -n Keystore Password:
|
||||
read -s password
|
||||
export P4A_RELEASE_KEYSTORE=~/.keystore
|
||||
export P4A_RELEASE_KEYSTORE_PASSWD=$password
|
||||
export P4A_RELEASE_KEYALIAS_PASSWD=$password
|
||||
export P4A_RELEASE_KEYALIAS=electrum
|
||||
make release
|
||||
else
|
||||
make apk
|
||||
fi
|
||||
|
||||
pushd lib
|
||||
VERSION=$(python -c "import version; print version.ELECTRUM_VERSION")".0"
|
||||
popd
|
||||
echo $VERSION
|
||||
echo $VERSION > contrib/apk_version
|
||||
pushd ./gui/kivy/; make apk; popd
|
||||
|
||||
@ -1,30 +1,23 @@
|
||||
#!/usr/bin/python3
|
||||
import re
|
||||
import os
|
||||
#!/usr/bin/python2
|
||||
import sys
|
||||
import re
|
||||
import hashlib
|
||||
import os
|
||||
|
||||
from electrum.version import ELECTRUM_VERSION, APK_VERSION
|
||||
print("version", ELECTRUM_VERSION)
|
||||
|
||||
dirname = sys.argv[1]
|
||||
print("directory", dirname)
|
||||
|
||||
download_page = os.path.join(dirname, "panel-download.html")
|
||||
download_template = download_page + ".template"
|
||||
from versions import version, version_win, version_mac, version_android, version_apk
|
||||
from versions import download_template, download_page
|
||||
|
||||
with open(download_template) as f:
|
||||
string = f.read()
|
||||
|
||||
version = version_win = version_mac = version_android = ELECTRUM_VERSION
|
||||
string = string.replace("##VERSION##", version)
|
||||
string = string.replace("##VERSION_WIN##", version_win)
|
||||
string = string.replace("##VERSION_MAC##", version_mac)
|
||||
string = string.replace("##VERSION_ANDROID##", version_android)
|
||||
string = string.replace("##VERSION_APK##", APK_VERSION)
|
||||
string = string.replace("##VERSION_APK##", version_apk)
|
||||
|
||||
files = {
|
||||
'tgz': "Electrum-%s.tar.gz" % version,
|
||||
'appimage': "electrum-%s-x86_64.AppImage" % version,
|
||||
'zip': "Electrum-%s.zip" % version,
|
||||
'mac': "electrum-%s.dmg" % version_mac,
|
||||
'win': "electrum-%s.exe" % version_win,
|
||||
@ -50,9 +43,11 @@ for k, n in files.items():
|
||||
string = re.sub("<div id=\"%s\">(.*?)</div>"%k, '', string, flags=re.DOTALL + re.MULTILINE)
|
||||
continue
|
||||
if os.system("gpg --verify %s"%sigpath) != 0:
|
||||
raise Exception(sigpath)
|
||||
raise BaseException(sigpath)
|
||||
string = string.replace("##link_%s##"%k, link)
|
||||
|
||||
|
||||
with open(download_page,'w') as f:
|
||||
f.write(string)
|
||||
|
||||
|
||||
|
||||
@ -1,68 +1,49 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import subprocess
|
||||
import io
|
||||
import zipfile
|
||||
import sys
|
||||
|
||||
try:
|
||||
import requests
|
||||
except ImportError as e:
|
||||
sys.exit(f"Error: {str(e)}. Try 'sudo python3 -m pip install <module-name>'")
|
||||
#!/usr/bin/env python2
|
||||
from StringIO import StringIO
|
||||
import os, zipfile, pycurl
|
||||
import requests
|
||||
|
||||
os.chdir(os.path.dirname(os.path.realpath(__file__)))
|
||||
os.chdir('..')
|
||||
|
||||
cmd = "find electrum -type f -name '*.py' -o -name '*.kv'"
|
||||
|
||||
files = subprocess.check_output(cmd, shell=True)
|
||||
|
||||
with open("app.fil", "wb") as f:
|
||||
f.write(files)
|
||||
|
||||
print("Found {} files to translate".format(len(files.splitlines())))
|
||||
|
||||
# Generate fresh translation template
|
||||
if not os.path.exists('electrum/locale'):
|
||||
os.mkdir('electrum/locale')
|
||||
cmd = 'xgettext -s --from-code UTF-8 --language Python --no-wrap -f app.fil --output=electrum/locale/messages.pot'
|
||||
print('Generate template')
|
||||
if not os.path.exists('lib/locale'):
|
||||
os.mkdir('lib/locale')
|
||||
cmd = 'xgettext -s --no-wrap -f app.fil --output=lib/locale/messages.pot'
|
||||
print 'Generate template'
|
||||
os.system(cmd)
|
||||
|
||||
os.chdir('electrum')
|
||||
os.chdir('lib')
|
||||
|
||||
crowdin_identifier = 'electrum'
|
||||
crowdin_file_name = 'files[electrum-client/messages.pot]'
|
||||
crowdin_file_name = 'electrum-client/messages.pot'
|
||||
locale_file_name = 'locale/messages.pot'
|
||||
|
||||
crowdin_api_key = None
|
||||
|
||||
filename = os.path.expanduser('~/.crowdin_api_key')
|
||||
if os.path.exists(filename):
|
||||
with open(filename) as f:
|
||||
crowdin_api_key = f.read().strip()
|
||||
|
||||
if os.path.exists('../contrib/crowdin_api_key.txt'):
|
||||
crowdin_api_key = open('../contrib/crowdin_api_key.txt').read().strip()
|
||||
if "crowdin_api_key" in os.environ:
|
||||
crowdin_api_key = os.environ["crowdin_api_key"]
|
||||
|
||||
if crowdin_api_key:
|
||||
# Push to Crowdin
|
||||
print('Push to Crowdin')
|
||||
print 'Push to Crowdin'
|
||||
url = ('https://api.crowdin.com/api/project/' + crowdin_identifier + '/update-file?key=' + crowdin_api_key)
|
||||
with open(locale_file_name, 'rb') as f:
|
||||
files = {crowdin_file_name: f}
|
||||
response = requests.request('POST', url, files=files)
|
||||
print("", "update-file:", "-"*20, response.text, "-"*20, sep="\n")
|
||||
c = pycurl.Curl()
|
||||
c.setopt(c.URL, url)
|
||||
c.setopt(c.POST, 1)
|
||||
fields = [('files[' + crowdin_file_name + ']', (pycurl.FORM_FILE, locale_file_name))]
|
||||
c.setopt(c.HTTPPOST, fields)
|
||||
c.perform()
|
||||
# Build translations
|
||||
print('Build translations')
|
||||
response = requests.request('GET', 'https://api.crowdin.com/api/project/' + crowdin_identifier + '/export?key=' + crowdin_api_key)
|
||||
print("", "export:", "-" * 20, response.text, "-" * 20, sep="\n")
|
||||
print 'Build translations'
|
||||
response = requests.request('GET', 'http://api.crowdin.com/api/project/' + crowdin_identifier + '/export?key=' + crowdin_api_key).content
|
||||
print response
|
||||
|
||||
# Download & unzip
|
||||
print('Download translations')
|
||||
s = requests.request('GET', 'https://crowdin.com/backend/download/project/' + crowdin_identifier + '.zip').content
|
||||
zfobj = zipfile.ZipFile(io.BytesIO(s))
|
||||
print 'Download translations'
|
||||
zfobj = zipfile.ZipFile(StringIO(requests.request('GET', 'http://crowdin.com/download/project/' + crowdin_identifier + '.zip').content))
|
||||
|
||||
print('Unzip translations')
|
||||
print 'Unzip translations'
|
||||
for name in zfobj.namelist():
|
||||
if not name.startswith('electrum-client/locale'):
|
||||
continue
|
||||
@ -70,11 +51,12 @@ for name in zfobj.namelist():
|
||||
if not os.path.exists(name[16:]):
|
||||
os.mkdir(name[16:])
|
||||
else:
|
||||
with open(name[16:], 'wb') as output:
|
||||
output.write(zfobj.read(name))
|
||||
output = open(name[16:],'w')
|
||||
output.write(zfobj.read(name))
|
||||
output.close()
|
||||
|
||||
# Convert .po to .mo
|
||||
print('Installing')
|
||||
print 'Installing'
|
||||
for lang in os.listdir('locale'):
|
||||
if lang.startswith('messages'):
|
||||
continue
|
||||
@ -83,5 +65,5 @@ for lang in os.listdir('locale'):
|
||||
if not os.path.exists(mo_dir):
|
||||
os.mkdir(mo_dir)
|
||||
cmd = 'msgfmt --output-file="%s/electrum.mo" "locale/%s/electrum.po"' % (mo_dir,lang)
|
||||
print('Installing', lang)
|
||||
print 'Installing',lang
|
||||
os.system(cmd)
|
||||
|
||||
@ -1,10 +1,62 @@
|
||||
#!/bin/bash
|
||||
#!/usr/bin/python2
|
||||
|
||||
CONTRIB="$(dirname "$0")"
|
||||
test -n "$CONTRIB" -a -d "$CONTRIB" || exit
|
||||
import sys, re, shutil, os, hashlib
|
||||
import imp
|
||||
import getpass
|
||||
|
||||
rm "$CONTRIB"/../packages/ -r
|
||||
if __name__ == '__main__':
|
||||
|
||||
d = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
|
||||
os.chdir(d)
|
||||
v = imp.load_source('version', 'lib/version.py')
|
||||
version = v.ELECTRUM_VERSION
|
||||
print "version", version
|
||||
|
||||
# copy dependencies into 'packages' directory
|
||||
deps = [
|
||||
'pyaes',
|
||||
'ecdsa',
|
||||
'pbkdf2',
|
||||
'requests', # note: requests-2.5.1 is needed to build with pyinstaller
|
||||
'qrcode',
|
||||
'google/protobuf',
|
||||
'dns',
|
||||
'six',
|
||||
'jsonrpclib',
|
||||
'socks',
|
||||
]
|
||||
for module in deps:
|
||||
f, pathname, descr = imp.find_module(module)
|
||||
target = 'packages/' + module + descr[0]
|
||||
if os.path.exists(target):
|
||||
continue
|
||||
d = os.path.dirname(target)
|
||||
if d and not (os.path.exists(d)):
|
||||
os.makedirs(d)
|
||||
if descr[0]:
|
||||
shutil.copy(pathname, target)
|
||||
else:
|
||||
shutil.copytree(pathname, target, ignore=shutil.ignore_patterns('*.pyc'))
|
||||
|
||||
# fix google/__init__.py needed by pyinstaller
|
||||
n = 'packages/google/__init__.py'
|
||||
if not os.path.exists(n):
|
||||
os.system("echo \# do not remove>%s"%n)
|
||||
|
||||
# patch requests and add cacert.pem
|
||||
import requests
|
||||
crt = requests.certs.where()
|
||||
n = 'packages/requests/certs.py'
|
||||
with open(n, 'r') as f:
|
||||
s = f.read()
|
||||
s = s.replace("'%s'"%crt, "os.path.join(os.path.dirname(__file__), 'cacert.pem')")
|
||||
with open(n, 'w') as f:
|
||||
f.write(s)
|
||||
shutil.copy(crt, 'packages/requests/cacert.pem')
|
||||
|
||||
os.system("pyrcc4 icons.qrc -o gui/qt/icons_rc.py")
|
||||
os.system("python setup.py sdist --format=zip,gztar")
|
||||
|
||||
print "Packages are ready in dist"
|
||||
|
||||
#Install pure python modules in electrum directory
|
||||
python3 -m pip install -r "$CONTRIB"/deterministic-build/requirements.txt -t "$CONTRIB"/../packages
|
||||
|
||||
|
||||
@ -1,31 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
CONTRIB="$(dirname "$(readlink -e "$0")")"
|
||||
ROOT_FOLDER="$CONTRIB"/..
|
||||
PACKAGES="$ROOT_FOLDER"/packages/
|
||||
LOCALE="$ROOT_FOLDER"/electrum/locale/
|
||||
|
||||
if [ ! -d "$LOCALE" ]; then
|
||||
echo "Run make_locale first!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d "$PACKAGES" ]; then
|
||||
echo "Run make_packages first!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
(
|
||||
cd "$ROOT_FOLDER"
|
||||
|
||||
echo "'git clean -fx' would delete the following files: >>>"
|
||||
git clean -fx --dry-run
|
||||
echo "<<<"
|
||||
|
||||
# we could build the kivy atlas potentially?
|
||||
#(cd electrum/gui/kivy/; make theming) || echo "building kivy atlas failed! skipping."
|
||||
|
||||
python3 setup.py --quiet sdist --format=zip,gztar
|
||||
)
|
||||
@ -1 +0,0 @@
|
||||
Subproject commit 59dfc03272751cd29ee311456fa34c40f7ebb7c0
|
||||
@ -1,66 +0,0 @@
|
||||
Building Mac OS binaries
|
||||
========================
|
||||
|
||||
This guide explains how to build Electrum binaries for macOS systems.
|
||||
|
||||
|
||||
## 1. Building the binary
|
||||
|
||||
This needs to be done on a system running macOS or OS X. We use El Capitan (10.11.6) as building it
|
||||
on High Sierra (or later)
|
||||
makes the binaries [incompatible with older versions](https://github.com/pyinstaller/pyinstaller/issues/1191).
|
||||
|
||||
Before starting, make sure that the Xcode command line tools are installed (e.g. you have `git`).
|
||||
|
||||
#### 1.1a Get Xcode
|
||||
|
||||
Building the QR scanner (CalinsQRReader) requires full Xcode (not just command line tools).
|
||||
|
||||
The last Xcode version compatible with El Capitan is Xcode 8.2.1
|
||||
|
||||
Get it from [here](https://developer.apple.com/download/more/).
|
||||
|
||||
Unfortunately, you need an "Apple ID" account.
|
||||
|
||||
After downloading, uncompress it.
|
||||
|
||||
Make sure it is the "selected" xcode (e.g.):
|
||||
|
||||
sudo xcode-select -s $HOME/Downloads/Xcode.app/Contents/Developer/
|
||||
|
||||
#### 1.1b Build QR scanner separately on newer Mac
|
||||
|
||||
Alternatively, you can try building just the QR scanner on newer macOS.
|
||||
|
||||
On newer Mac, run:
|
||||
|
||||
pushd contrib/osx/CalinsQRReader; xcodebuild; popd
|
||||
cp -r contrib/osx/CalinsQRReader/build prebuilt_qr
|
||||
|
||||
Move `prebuilt_qr` to El Capitan: `contrib/osx/CalinsQRReader/prebuilt_qr`.
|
||||
|
||||
|
||||
#### 1.2 Build Electrum
|
||||
|
||||
cd electrum
|
||||
./contrib/osx/make_osx
|
||||
|
||||
This creates both a folder named Electrum.app and the .dmg file.
|
||||
|
||||
|
||||
## 2. Building the image deterministically (WIP)
|
||||
The usual way to distribute macOS applications is to use image files containing the
|
||||
application. Although these images can be created on a Mac with the built-in `hdiutil`,
|
||||
they are not deterministic.
|
||||
|
||||
Instead, we use the toolchain that Bitcoin uses: genisoimage and libdmg-hfsplus.
|
||||
These tools do not work on macOS, so you need a separate Linux machine (or VM).
|
||||
|
||||
Copy the Electrum.app directory over and install the dependencies, e.g.:
|
||||
|
||||
apt install libcap-dev cmake make gcc faketime
|
||||
|
||||
Then you can just invoke `package.sh` with the path to the app:
|
||||
|
||||
cd electrum
|
||||
./contrib/osx/package.sh ~/Electrum.app/
|
||||
@ -1,23 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
. $(dirname "$0")/../build_tools_util.sh
|
||||
|
||||
|
||||
function DoCodeSignMaybe { # ARGS: infoName fileOrDirName codesignIdentity
|
||||
infoName="$1"
|
||||
file="$2"
|
||||
identity="$3"
|
||||
deep=""
|
||||
if [ -z "$identity" ]; then
|
||||
# we are ok with them not passing anything; master script calls us unconditionally even if no identity is specified
|
||||
return
|
||||
fi
|
||||
if [ -d "$file" ]; then
|
||||
deep="--deep"
|
||||
fi
|
||||
if [ -z "$infoName" ] || [ -z "$file" ] || [ -z "$identity" ] || [ ! -e "$file" ]; then
|
||||
fail "Argument error to internal function DoCodeSignMaybe()"
|
||||
fi
|
||||
info "Code signing ${infoName}..."
|
||||
codesign -f -v $deep -s "$identity" "$file" || fail "Could not code sign ${infoName}"
|
||||
}
|
||||
@ -1,86 +0,0 @@
|
||||
--- cdrkit-1.1.11.old/genisoimage/tree.c 2008-10-21 19:57:47.000000000 -0400
|
||||
+++ cdrkit-1.1.11/genisoimage/tree.c 2013-12-06 00:23:18.489622668 -0500
|
||||
@@ -1139,8 +1139,9 @@
|
||||
scan_directory_tree(struct directory *this_dir, char *path,
|
||||
struct directory_entry *de)
|
||||
{
|
||||
- DIR *current_dir;
|
||||
+ int current_file;
|
||||
char whole_path[PATH_MAX];
|
||||
+ struct dirent **d_list;
|
||||
struct dirent *d_entry;
|
||||
struct directory *parent;
|
||||
int dflag;
|
||||
@@ -1164,7 +1165,8 @@
|
||||
this_dir->dir_flags |= DIR_WAS_SCANNED;
|
||||
|
||||
errno = 0; /* Paranoia */
|
||||
- current_dir = opendir(path);
|
||||
+ //current_dir = opendir(path);
|
||||
+ current_file = scandir(path, &d_list, NULL, alphasort);
|
||||
d_entry = NULL;
|
||||
|
||||
/*
|
||||
@@ -1173,12 +1175,12 @@
|
||||
*/
|
||||
old_path = path;
|
||||
|
||||
- if (current_dir) {
|
||||
+ if (current_file >= 0) {
|
||||
errno = 0;
|
||||
- d_entry = readdir(current_dir);
|
||||
+ d_entry = d_list[0];
|
||||
}
|
||||
|
||||
- if (!current_dir || !d_entry) {
|
||||
+ if (current_file < 0 || !d_entry) {
|
||||
int ret = 1;
|
||||
|
||||
#ifdef USE_LIBSCHILY
|
||||
@@ -1191,8 +1193,8 @@
|
||||
de->isorec.flags[0] &= ~ISO_DIRECTORY;
|
||||
ret = 0;
|
||||
}
|
||||
- if (current_dir)
|
||||
- closedir(current_dir);
|
||||
+ if(d_list)
|
||||
+ free(d_list);
|
||||
return (ret);
|
||||
}
|
||||
#ifdef ABORT_DEEP_ISO_ONLY
|
||||
@@ -1208,7 +1210,7 @@
|
||||
errmsgno(EX_BAD, "use Rock Ridge extensions via -R or -r,\n");
|
||||
errmsgno(EX_BAD, "or allow deep ISO9660 directory nesting via -D.\n");
|
||||
}
|
||||
- closedir(current_dir);
|
||||
+ free(d_list);
|
||||
return (1);
|
||||
}
|
||||
#endif
|
||||
@@ -1250,13 +1252,13 @@
|
||||
* The first time through, skip this, since we already asked
|
||||
* for the first entry when we opened the directory.
|
||||
*/
|
||||
- if (dflag)
|
||||
- d_entry = readdir(current_dir);
|
||||
+ if (dflag && current_file >= 0)
|
||||
+ d_entry = d_list[current_file];
|
||||
dflag++;
|
||||
|
||||
- if (!d_entry)
|
||||
+ if (current_file < 0)
|
||||
break;
|
||||
-
|
||||
+ current_file--;
|
||||
/* OK, got a valid entry */
|
||||
|
||||
/* If we do not want all files, then pitch the backups. */
|
||||
@@ -1348,7 +1350,7 @@
|
||||
insert_file_entry(this_dir, whole_path, d_entry->d_name);
|
||||
#endif /* APPLE_HYB */
|
||||
}
|
||||
- closedir(current_dir);
|
||||
+ free(d_list);
|
||||
|
||||
#ifdef APPLE_HYB
|
||||
/*
|
||||
@ -1,143 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Parameterize
|
||||
PYTHON_VERSION=3.6.4
|
||||
BUILDDIR=/tmp/electrum-build
|
||||
PACKAGE=Electrum
|
||||
GIT_REPO=https://github.com/spesmilo/electrum
|
||||
LIBSECP_VERSION="b408c6a8b287003d1ade5709e6f7bc3c7f1d5be7"
|
||||
|
||||
. $(dirname "$0")/base.sh
|
||||
|
||||
src_dir=$(dirname "$0")
|
||||
cd $src_dir/../..
|
||||
|
||||
export PYTHONHASHSEED=22
|
||||
VERSION=`git describe --tags --dirty --always`
|
||||
|
||||
which brew > /dev/null 2>&1 || fail "Please install brew from https://brew.sh/ to continue"
|
||||
which xcodebuild > /dev/null 2>&1 || fail "Please install Xcode and xcode command line tools to continue"
|
||||
|
||||
# Code Signing: See https://developer.apple.com/library/archive/documentation/Security/Conceptual/CodeSigningGuide/Procedures/Procedures.html
|
||||
APP_SIGN=""
|
||||
if [ -n "$1" ]; then
|
||||
# Test the identity is valid for signing by doing this hack. There is no other way to do this.
|
||||
cp -f /bin/ls ./CODESIGN_TEST
|
||||
codesign -s "$1" --dryrun -f ./CODESIGN_TEST > /dev/null 2>&1
|
||||
res=$?
|
||||
rm -f ./CODESIGN_TEST
|
||||
if ((res)); then
|
||||
fail "Code signing identity \"$1\" appears to be invalid."
|
||||
fi
|
||||
unset res
|
||||
APP_SIGN="$1"
|
||||
info "Code signing enabled using identity \"$APP_SIGN\""
|
||||
else
|
||||
warn "Code signing DISABLED. Specify a valid macOS Developer identity installed on the system as the first argument to this script to enable signing."
|
||||
fi
|
||||
|
||||
info "Installing Python $PYTHON_VERSION"
|
||||
export PATH="~/.pyenv/bin:~/.pyenv/shims:~/Library/Python/3.6/bin:$PATH"
|
||||
if [ -d "~/.pyenv" ]; then
|
||||
pyenv update
|
||||
else
|
||||
curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer | bash > /dev/null 2>&1
|
||||
fi
|
||||
PYTHON_CONFIGURE_OPTS="--enable-framework" pyenv install -s $PYTHON_VERSION && \
|
||||
pyenv global $PYTHON_VERSION || \
|
||||
fail "Unable to use Python $PYTHON_VERSION"
|
||||
|
||||
|
||||
info "install dependencies specific to binaries"
|
||||
# note that this also installs pinned versions of both pip and setuptools
|
||||
python3 -m pip install -Ir ./contrib/deterministic-build/requirements-binaries.txt --user \
|
||||
|| fail "Could not install pyinstaller"
|
||||
|
||||
|
||||
info "Installing pyinstaller"
|
||||
python3 -m pip install -I --user pyinstaller==3.4 --no-use-pep517 || fail "Could not install pyinstaller"
|
||||
|
||||
info "Using these versions for building $PACKAGE:"
|
||||
sw_vers
|
||||
python3 --version
|
||||
echo -n "Pyinstaller "
|
||||
pyinstaller --version
|
||||
|
||||
rm -rf ./dist
|
||||
|
||||
git submodule init
|
||||
git submodule update
|
||||
|
||||
rm -rf $BUILDDIR > /dev/null 2>&1
|
||||
mkdir $BUILDDIR
|
||||
|
||||
cp -R ./contrib/deterministic-build/electrum-locale/locale/ ./electrum/locale/
|
||||
|
||||
|
||||
info "Downloading libusb..."
|
||||
curl https://homebrew.bintray.com/bottles/libusb-1.0.22.el_capitan.bottle.tar.gz | \
|
||||
tar xz --directory $BUILDDIR
|
||||
cp $BUILDDIR/libusb/1.0.22/lib/libusb-1.0.dylib contrib/osx
|
||||
echo "82c368dfd4da017ceb32b12ca885576f325503428a4966cc09302cbd62702493 contrib/osx/libusb-1.0.dylib" | \
|
||||
shasum -a 256 -c || fail "libusb checksum mismatched"
|
||||
|
||||
info "Building libsecp256k1"
|
||||
brew install autoconf automake libtool
|
||||
git clone https://github.com/bitcoin-core/secp256k1 $BUILDDIR/secp256k1
|
||||
pushd $BUILDDIR/secp256k1
|
||||
git reset --hard $LIBSECP_VERSION
|
||||
git clean -f -x -q
|
||||
./autogen.sh
|
||||
./configure --enable-module-recovery --enable-experimental --enable-module-ecdh --disable-jni
|
||||
make
|
||||
popd
|
||||
cp $BUILDDIR/secp256k1/.libs/libsecp256k1.0.dylib contrib/osx
|
||||
|
||||
info "Building CalinsQRReader..."
|
||||
d=contrib/osx/CalinsQRReader
|
||||
pushd $d
|
||||
rm -fr build
|
||||
# prefer building using xcode ourselves. otherwise fallback to prebuilt binary
|
||||
xcodebuild || cp -r prebuilt_qr build || fail "Could not build CalinsQRReader"
|
||||
popd
|
||||
DoCodeSignMaybe "CalinsQRReader.app" "${d}/build/Release/CalinsQRReader.app" "$APP_SIGN" # If APP_SIGN is empty will be a noop
|
||||
|
||||
|
||||
info "Installing requirements..."
|
||||
python3 -m pip install -Ir ./contrib/deterministic-build/requirements.txt --user || \
|
||||
fail "Could not install requirements"
|
||||
|
||||
info "Installing hardware wallet requirements..."
|
||||
python3 -m pip install -Ir ./contrib/deterministic-build/requirements-hw.txt --user || \
|
||||
fail "Could not install hardware wallet requirements"
|
||||
|
||||
info "Building $PACKAGE..."
|
||||
python3 -m pip install --user . > /dev/null || fail "Could not build $PACKAGE"
|
||||
|
||||
info "Faking timestamps..."
|
||||
for d in ~/Library/Python/ ~/.pyenv .; do
|
||||
pushd $d
|
||||
find . -exec touch -t '200101220000' {} +
|
||||
popd
|
||||
done
|
||||
|
||||
info "Building binary"
|
||||
APP_SIGN="$APP_SIGN" pyinstaller --noconfirm --ascii --clean --name $VERSION contrib/osx/osx.spec || fail "Could not build binary"
|
||||
|
||||
info "Adding bitcoin URI types to Info.plist"
|
||||
plutil -insert 'CFBundleURLTypes' \
|
||||
-xml '<array><dict> <key>CFBundleURLName</key> <string>bitcoin</string> <key>CFBundleURLSchemes</key> <array><string>bitcoin</string></array> </dict></array>' \
|
||||
-- dist/$PACKAGE.app/Contents/Info.plist \
|
||||
|| fail "Could not add keys to Info.plist. Make sure the program 'plutil' exists and is installed."
|
||||
|
||||
DoCodeSignMaybe "app bundle" "dist/${PACKAGE}.app" "$APP_SIGN" # If APP_SIGN is empty will be a noop
|
||||
|
||||
info "Creating .DMG"
|
||||
hdiutil create -fs HFS+ -volname $PACKAGE -srcfolder dist/$PACKAGE.app dist/electrum-$VERSION.dmg || fail "Could not create .DMG"
|
||||
|
||||
DoCodeSignMaybe ".DMG" "dist/electrum-${VERSION}.dmg" "$APP_SIGN" # If APP_SIGN is empty will be a noop
|
||||
|
||||
if [ -z "$APP_SIGN" ]; then
|
||||
warn "App was built successfully but was not code signed. Users may get security warnings from macOS."
|
||||
warn "Specify a valid code signing identity as the first argument to this script to enable code signing."
|
||||
fi
|
||||
@ -1,162 +0,0 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
from PyInstaller.utils.hooks import collect_data_files, collect_submodules, collect_dynamic_libs
|
||||
|
||||
import sys, os
|
||||
|
||||
PACKAGE='Electrum'
|
||||
PYPKG='electrum'
|
||||
MAIN_SCRIPT='run_electrum'
|
||||
ICONS_FILE=PYPKG + '/gui/icons/electrum.icns'
|
||||
APP_SIGN = os.environ.get('APP_SIGN', '')
|
||||
|
||||
def fail(*msg):
|
||||
RED='\033[0;31m'
|
||||
NC='\033[0m' # No Color
|
||||
print("\r🗯 {}ERROR:{}".format(RED, NC), *msg)
|
||||
sys.exit(1)
|
||||
|
||||
def codesign(identity, binary):
|
||||
d = os.path.dirname(binary)
|
||||
saved_dir=None
|
||||
if d:
|
||||
# switch to directory of the binary so codesign verbose messages don't include long path
|
||||
saved_dir = os.path.abspath(os.path.curdir)
|
||||
os.chdir(d)
|
||||
binary = os.path.basename(binary)
|
||||
os.system("codesign -v -f -s '{}' '{}'".format(identity, binary))==0 or fail("Could not code sign " + binary)
|
||||
if saved_dir:
|
||||
os.chdir(saved_dir)
|
||||
|
||||
def monkey_patch_pyinstaller_for_codesigning(identity):
|
||||
# Monkey-patch PyInstaller so that we app-sign all binaries *after* they are modified by PyInstaller
|
||||
# If we app-sign before that point, the signature will be invalid because PyInstaller modifies
|
||||
# @loader_path in the Mach-O loader table.
|
||||
try:
|
||||
import PyInstaller.depend.dylib
|
||||
_saved_func = PyInstaller.depend.dylib.mac_set_relative_dylib_deps
|
||||
except (ImportError, NameError, AttributeError):
|
||||
# Hmm. Likely wrong PyInstaller version.
|
||||
fail("Could not monkey-patch PyInstaller for code signing. Please ensure that you are using PyInstaller 3.4.")
|
||||
_signed = set()
|
||||
def my_func(fn, distname):
|
||||
_saved_func(fn, distname)
|
||||
if (fn, distname) not in _signed:
|
||||
codesign(identity, fn)
|
||||
_signed.add((fn,distname)) # remember we signed it so we don't sign again
|
||||
PyInstaller.depend.dylib.mac_set_relative_dylib_deps = my_func
|
||||
|
||||
|
||||
for i, x in enumerate(sys.argv):
|
||||
if x == '--name':
|
||||
VERSION = sys.argv[i+1]
|
||||
break
|
||||
else:
|
||||
raise Exception('no version')
|
||||
|
||||
electrum = os.path.abspath(".") + "/"
|
||||
block_cipher = None
|
||||
|
||||
# see https://github.com/pyinstaller/pyinstaller/issues/2005
|
||||
hiddenimports = []
|
||||
hiddenimports += collect_submodules('trezorlib')
|
||||
hiddenimports += collect_submodules('safetlib')
|
||||
hiddenimports += collect_submodules('btchip')
|
||||
hiddenimports += collect_submodules('keepkeylib')
|
||||
hiddenimports += collect_submodules('websocket')
|
||||
hiddenimports += collect_submodules('ckcc')
|
||||
|
||||
# safetlib imports PyQt5.Qt. We use a local updated copy of pinmatrix.py until they
|
||||
# release a new version that includes https://github.com/archos-safe-t/python-safet/commit/b1eab3dba4c04fdfc1fcf17b66662c28c5f2380e
|
||||
hiddenimports.remove('safetlib.qt.pinmatrix')
|
||||
|
||||
|
||||
datas = [
|
||||
(electrum + PYPKG + '/*.json', PYPKG),
|
||||
(electrum + PYPKG + '/wordlist/english.txt', PYPKG + '/wordlist'),
|
||||
(electrum + PYPKG + '/locale', PYPKG + '/locale'),
|
||||
(electrum + PYPKG + '/plugins', PYPKG + '/plugins'),
|
||||
(electrum + PYPKG + '/gui/icons', PYPKG + '/gui/icons'),
|
||||
]
|
||||
datas += collect_data_files('trezorlib')
|
||||
datas += collect_data_files('safetlib')
|
||||
datas += collect_data_files('btchip')
|
||||
datas += collect_data_files('keepkeylib')
|
||||
datas += collect_data_files('ckcc')
|
||||
|
||||
# Add the QR Scanner helper app
|
||||
datas += [(electrum + "contrib/osx/CalinsQRReader/build/Release/CalinsQRReader.app", "./contrib/osx/CalinsQRReader/build/Release/CalinsQRReader.app")]
|
||||
|
||||
# Add libusb so Trezor and Safe-T mini will work
|
||||
binaries = [(electrum + "contrib/osx/libusb-1.0.dylib", ".")]
|
||||
binaries += [(electrum + "contrib/osx/libsecp256k1.0.dylib", ".")]
|
||||
|
||||
# Workaround for "Retro Look":
|
||||
binaries += [b for b in collect_dynamic_libs('PyQt5') if 'macstyle' in b[0]]
|
||||
|
||||
# We don't put these files in to actually include them in the script but to make the Analysis method scan them for imports
|
||||
a = Analysis([electrum+ MAIN_SCRIPT,
|
||||
electrum+'electrum/gui/qt/main_window.py',
|
||||
electrum+'electrum/gui/text.py',
|
||||
electrum+'electrum/util.py',
|
||||
electrum+'electrum/wallet.py',
|
||||
electrum+'electrum/simple_config.py',
|
||||
electrum+'electrum/bitcoin.py',
|
||||
electrum+'electrum/dnssec.py',
|
||||
electrum+'electrum/commands.py',
|
||||
electrum+'electrum/plugins/cosigner_pool/qt.py',
|
||||
electrum+'electrum/plugins/email_requests/qt.py',
|
||||
electrum+'electrum/plugins/trezor/qt.py',
|
||||
electrum+'electrum/plugins/safe_t/client.py',
|
||||
electrum+'electrum/plugins/safe_t/qt.py',
|
||||
electrum+'electrum/plugins/keepkey/qt.py',
|
||||
electrum+'electrum/plugins/ledger/qt.py',
|
||||
electrum+'electrum/plugins/coldcard/qt.py',
|
||||
],
|
||||
binaries=binaries,
|
||||
datas=datas,
|
||||
hiddenimports=hiddenimports,
|
||||
hookspath=[])
|
||||
|
||||
# http://stackoverflow.com/questions/19055089/pyinstaller-onefile-warning-pyconfig-h-when-importing-scipy-or-scipy-signal
|
||||
for d in a.datas:
|
||||
if 'pyconfig' in d[0]:
|
||||
a.datas.remove(d)
|
||||
break
|
||||
|
||||
# Strip out parts of Qt that we never use. Reduces binary size by tens of MBs. see #4815
|
||||
qt_bins2remove=('qtweb', 'qt3d', 'qtgame', 'qtdesigner', 'qtquick', 'qtlocation', 'qttest', 'qtxml')
|
||||
print("Removing Qt binaries:", *qt_bins2remove)
|
||||
for x in a.binaries.copy():
|
||||
for r in qt_bins2remove:
|
||||
if x[0].lower().startswith(r):
|
||||
a.binaries.remove(x)
|
||||
print('----> Removed x =', x)
|
||||
|
||||
# If code signing, monkey-patch in a code signing step to pyinstaller. See: https://github.com/spesmilo/electrum/issues/4994
|
||||
if APP_SIGN:
|
||||
monkey_patch_pyinstaller_for_codesigning(APP_SIGN)
|
||||
|
||||
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
|
||||
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.datas,
|
||||
name=PACKAGE,
|
||||
debug=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
icon=electrum+ICONS_FILE,
|
||||
console=False)
|
||||
|
||||
app = BUNDLE(exe,
|
||||
version = VERSION,
|
||||
name=PACKAGE + '.app',
|
||||
icon=electrum+ICONS_FILE,
|
||||
bundle_identifier=None,
|
||||
info_plist={
|
||||
'NSHighResolutionCapable': 'True',
|
||||
'NSSupportsAutomaticGraphicsSwitching': 'True'
|
||||
}
|
||||
)
|
||||
@ -1,88 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
cdrkit_version=1.1.11
|
||||
cdrkit_download_path=http://distro.ibiblio.org/fatdog/source/600/c
|
||||
cdrkit_file_name=cdrkit-${cdrkit_version}.tar.bz2
|
||||
cdrkit_sha256_hash=b50d64c214a65b1a79afe3a964c691931a4233e2ba605d793eb85d0ac3652564
|
||||
cdrkit_patches=cdrkit-deterministic.patch
|
||||
genisoimage=genisoimage-$cdrkit_version
|
||||
|
||||
libdmg_url=https://github.com/theuni/libdmg-hfsplus
|
||||
|
||||
|
||||
export LD_PRELOAD=$(locate libfaketime.so.1)
|
||||
export FAKETIME="2000-01-22 00:00:00"
|
||||
export PATH=$PATH:~/bin
|
||||
|
||||
. $(dirname "$0")/base.sh
|
||||
|
||||
if [ -z "$1" ]; then
|
||||
echo "Usage: $0 Electrum.app"
|
||||
exit -127
|
||||
fi
|
||||
|
||||
mkdir -p ~/bin
|
||||
|
||||
if ! which ${genisoimage} > /dev/null 2>&1; then
|
||||
mkdir -p /tmp/electrum-macos
|
||||
cd /tmp/electrum-macos
|
||||
info "Downloading cdrkit $cdrkit_version"
|
||||
wget -nc ${cdrkit_download_path}/${cdrkit_file_name}
|
||||
tar xvf ${cdrkit_file_name}
|
||||
|
||||
info "Patching genisoimage"
|
||||
cd cdrkit-${cdrkit_version}
|
||||
patch -p1 < ../cdrkit-deterministic.patch
|
||||
|
||||
info "Building genisoimage"
|
||||
cmake . -Wno-dev
|
||||
make genisoimage
|
||||
cp genisoimage/genisoimage ~/bin/${genisoimage}
|
||||
fi
|
||||
|
||||
if ! which dmg > /dev/null 2>&1; then
|
||||
mkdir -p /tmp/electrum-macos
|
||||
cd /tmp/electrum-macos
|
||||
info "Downloading libdmg"
|
||||
LD_PRELOAD= git clone ${libdmg_url}
|
||||
cd libdmg-hfsplus
|
||||
info "Building libdmg"
|
||||
cmake .
|
||||
make
|
||||
cp dmg/dmg ~/bin
|
||||
fi
|
||||
|
||||
${genisoimage} -version || fail "Unable to install genisoimage"
|
||||
dmg -|| fail "Unable to install libdmg"
|
||||
|
||||
plist=$1/Contents/Info.plist
|
||||
test -f "$plist" || fail "Info.plist not found"
|
||||
VERSION=$(grep -1 ShortVersionString $plist |tail -1|gawk 'match($0, /<string>(.*)<\/string>/, a) {print a[1]}')
|
||||
echo $VERSION
|
||||
|
||||
rm -rf /tmp/electrum-macos/image > /dev/null 2>&1
|
||||
mkdir /tmp/electrum-macos/image/
|
||||
cp -r $1 /tmp/electrum-macos/image/
|
||||
|
||||
build_dir=$(dirname "$1")
|
||||
test -n "$build_dir" -a -d "$build_dir" || exit
|
||||
cd $build_dir
|
||||
|
||||
${genisoimage} \
|
||||
-no-cache-inodes \
|
||||
-D \
|
||||
-l \
|
||||
-probe \
|
||||
-V "Electrum" \
|
||||
-no-pad \
|
||||
-r \
|
||||
-dir-mode 0755 \
|
||||
-apple \
|
||||
-o Electrum_uncompressed.dmg \
|
||||
/tmp/electrum-macos/image || fail "Unable to create uncompressed dmg"
|
||||
|
||||
dmg dmg Electrum_uncompressed.dmg electrum-$VERSION.dmg || fail "Unable to create compressed dmg"
|
||||
rm Electrum_uncompressed.dmg
|
||||
|
||||
echo "Done."
|
||||
sha256sum electrum-$VERSION.dmg
|
||||
@ -1,2 +0,0 @@
|
||||
PyQt5
|
||||
pycryptodomex
|
||||
@ -1,8 +0,0 @@
|
||||
Cython>=0.27
|
||||
trezor[hidapi]>=0.11.1
|
||||
safet[hidapi]>=0.1.0
|
||||
keepkey
|
||||
btchip-python>=0.1.26
|
||||
ckcc-protocol>=0.7.2
|
||||
websocket-client
|
||||
hidapi
|
||||
@ -1,3 +0,0 @@
|
||||
tox
|
||||
python-coveralls
|
||||
tox-travis
|
||||
@ -1,12 +0,0 @@
|
||||
pyaes>=0.1a1
|
||||
ecdsa>=0.9
|
||||
qrcode
|
||||
protobuf
|
||||
dnspython
|
||||
jsonrpclib-pelix
|
||||
qdarkstyle<2.6
|
||||
aiorpcx>=0.9,<0.11
|
||||
aiohttp>=3.3.0
|
||||
aiohttp_socks
|
||||
certifi
|
||||
pylibscrypt==1.7.1
|
||||
@ -1,6 +1,7 @@
|
||||
#!/usr/bin/python2
|
||||
|
||||
import os
|
||||
import sys, re, shutil, os, hashlib
|
||||
import imp
|
||||
import getpass
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@ -1,4 +0,0 @@
|
||||
#!/bin/bash
|
||||
version=`python3 -c "import electrum; print(electrum.version.ELECTRUM_VERSION)"`
|
||||
sig=`./run_electrum -w $SIGNING_WALLET signmessage $SIGNING_ADDRESS $version`
|
||||
echo "{ \"version\":\"$version\", \"signatures\":{ \"$SIGNING_ADDRESS\":\"$sig\"}}"
|
||||
@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
version=`git describe --tags`
|
||||
echo $version
|
||||
|
||||
here=$(dirname "$0")
|
||||
cd $here/../dist
|
||||
|
||||
sftp -oBatchMode=no -b - thomasv@download.electrum.org << !
|
||||
cd electrum-downloads
|
||||
mkdir $version
|
||||
cd $version
|
||||
mput *
|
||||
bye
|
||||
!
|
||||
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python2
|
||||
# -*- mode: python -*-
|
||||
#
|
||||
# Electrum - lightweight Bitcoin client
|
||||
@ -23,28 +23,38 @@
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
MIN_PYTHON_VERSION = "3.6.1" # FIXME duplicated from setup.py
|
||||
_min_python_version_tuple = tuple(map(int, (MIN_PYTHON_VERSION.split("."))))
|
||||
|
||||
|
||||
if sys.version_info[:3] < _min_python_version_tuple:
|
||||
sys.exit("Error: Electrum requires Python version >= %s..." % MIN_PYTHON_VERSION)
|
||||
# from https://gist.github.com/tito/09c42fb4767721dc323d
|
||||
import threading
|
||||
try:
|
||||
import jnius
|
||||
except:
|
||||
jnius = None
|
||||
if jnius:
|
||||
orig_thread_run = threading.Thread.run
|
||||
def thread_check_run(*args, **kwargs):
|
||||
try:
|
||||
return orig_thread_run(*args, **kwargs)
|
||||
finally:
|
||||
jnius.detach()
|
||||
threading.Thread.run = thread_check_run
|
||||
|
||||
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
is_bundle = getattr(sys, 'frozen', False)
|
||||
is_local = not is_bundle and os.path.exists(os.path.join(script_dir, "electrum.desktop"))
|
||||
is_local = not is_bundle and os.path.exists(os.path.join(script_dir, "setup-release.py"))
|
||||
is_android = 'ANDROID_DATA' in os.environ
|
||||
|
||||
# move this back to gui/kivy/__init.py once plugins are moved
|
||||
os.environ['KIVY_DATA_DIR'] = os.path.abspath(os.path.dirname(__file__)) + '/electrum/gui/kivy/data/'
|
||||
os.environ['KIVY_DATA_DIR'] = os.path.abspath(os.path.dirname(__file__)) + '/gui/kivy/data/'
|
||||
|
||||
if is_local or is_android:
|
||||
sys.path.insert(0, os.path.join(script_dir, 'packages'))
|
||||
elif is_bundle and sys.platform=='darwin':
|
||||
sys.path.insert(0, os.getcwd() + "/lib/python2.7/packages")
|
||||
|
||||
|
||||
def check_imports():
|
||||
@ -53,37 +63,49 @@ def check_imports():
|
||||
import dns
|
||||
import pyaes
|
||||
import ecdsa
|
||||
import certifi
|
||||
import requests
|
||||
import six
|
||||
import qrcode
|
||||
import pbkdf2
|
||||
import google.protobuf
|
||||
import jsonrpclib
|
||||
import aiorpcx
|
||||
except ImportError as e:
|
||||
sys.exit(f"Error: {str(e)}. Try 'sudo python3 -m pip install <module-name>'")
|
||||
sys.exit("Error: %s. Try 'sudo pip install <module-name>'"%e.message)
|
||||
# the following imports are for pyinstaller
|
||||
from google.protobuf import descriptor
|
||||
from google.protobuf import message
|
||||
from google.protobuf import reflection
|
||||
from google.protobuf import descriptor_pb2
|
||||
from jsonrpclib import SimpleJSONRPCServer
|
||||
# check that we have the correct version of ecdsa
|
||||
try:
|
||||
from ecdsa.ecdsa import curve_secp256k1, generator_secp256k1
|
||||
except Exception:
|
||||
sys.exit("cannot import ecdsa.curve_secp256k1. You probably need to upgrade ecdsa.\nTry: sudo pip install --upgrade ecdsa")
|
||||
# make sure that certificates are here
|
||||
assert os.path.exists(certifi.where())
|
||||
assert os.path.exists(requests.utils.DEFAULT_CA_BUNDLE_PATH)
|
||||
|
||||
|
||||
if not is_android:
|
||||
check_imports()
|
||||
|
||||
# load local module as electrum
|
||||
if is_local or is_android:
|
||||
import imp
|
||||
imp.load_module('electrum', *imp.find_module('lib'))
|
||||
imp.load_module('electrum_gui', *imp.find_module('gui'))
|
||||
imp.load_module('electrum_plugins', *imp.find_module('plugins'))
|
||||
|
||||
from electrum import util
|
||||
from electrum import constants
|
||||
from electrum import SimpleConfig
|
||||
from electrum.wallet import Wallet
|
||||
from electrum.storage import WalletStorage, get_derivation_used_for_hw_device_encryption
|
||||
from electrum.util import print_msg, print_stderr, json_encode, json_decode, UserCancelled
|
||||
from electrum.util import set_verbosity, InvalidPassword
|
||||
from electrum import bitcoin, network
|
||||
from electrum import SimpleConfig, Network
|
||||
from electrum.wallet import Wallet, Imported_Wallet
|
||||
from electrum.storage import WalletStorage
|
||||
from electrum.util import print_msg, print_stderr, json_encode, json_decode
|
||||
from electrum.util import set_verbosity, InvalidPassword, check_www_dir
|
||||
from electrum.commands import get_parser, known_commands, Commands, config_variables
|
||||
from electrum import daemon
|
||||
from electrum import keystore
|
||||
from electrum.mnemonic import Mnemonic
|
||||
|
||||
# get password routine
|
||||
def prompt_password(prompt, confirm=True):
|
||||
@ -98,6 +120,71 @@ def prompt_password(prompt, confirm=True):
|
||||
return password
|
||||
|
||||
|
||||
|
||||
def run_non_RPC(config):
|
||||
cmdname = config.get('cmd')
|
||||
|
||||
storage = WalletStorage(config.get_wallet_path())
|
||||
if storage.file_exists():
|
||||
sys.exit("Error: Remove the existing wallet first!")
|
||||
|
||||
def password_dialog():
|
||||
return prompt_password("Password (hit return if you do not wish to encrypt your wallet):")
|
||||
|
||||
if cmdname == 'restore':
|
||||
text = config.get('text').strip()
|
||||
passphrase = config.get('passphrase', '')
|
||||
password = password_dialog() if keystore.is_private(text) else None
|
||||
if keystore.is_address_list(text):
|
||||
wallet = Imported_Wallet(storage)
|
||||
for x in text.split():
|
||||
wallet.import_address(x)
|
||||
else:
|
||||
if keystore.is_seed(text):
|
||||
k = keystore.from_seed(text, passphrase)
|
||||
elif keystore.is_any_key(text):
|
||||
k = keystore.from_keys(text)
|
||||
else:
|
||||
sys.exit("Error: Seed or key not recognized")
|
||||
if password:
|
||||
k.update_password(None, password)
|
||||
storage.put('keystore', k.dump())
|
||||
storage.put('wallet_type', 'standard')
|
||||
storage.put('use_encryption', bool(password))
|
||||
storage.write()
|
||||
wallet = Wallet(storage)
|
||||
if not config.get('offline'):
|
||||
network = Network(config)
|
||||
network.start()
|
||||
wallet.start_threads(network)
|
||||
print_msg("Recovering wallet...")
|
||||
wallet.synchronize()
|
||||
wallet.wait_until_synchronized()
|
||||
msg = "Recovery successful" if wallet.is_found() else "Found no history for this wallet"
|
||||
else:
|
||||
msg = "This wallet was restored offline. It may contain more addresses than displayed."
|
||||
print_msg(msg)
|
||||
|
||||
elif cmdname == 'create':
|
||||
password = password_dialog()
|
||||
passphrase = config.get('passphrase', '')
|
||||
seed = Mnemonic('en').make_seed()
|
||||
k = keystore.from_seed(seed, passphrase)
|
||||
k.update_password(None, password)
|
||||
storage.put('keystore', k.dump())
|
||||
storage.put('wallet_type', 'standard')
|
||||
storage.put('use_encryption', bool(password))
|
||||
storage.write()
|
||||
wallet = Wallet(storage)
|
||||
wallet.synchronize()
|
||||
print_msg("Your wallet generation seed is:\n\"%s\"" % seed)
|
||||
print_msg("Please keep it in a safe place; if you lose it, you will not be able to restore your wallet.")
|
||||
|
||||
wallet.storage.write()
|
||||
print_msg("Wallet saved in '%s'" % wallet.storage.path)
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def init_daemon(config_options):
|
||||
config = SimpleConfig(config_options)
|
||||
storage = WalletStorage(config.get_wallet_path())
|
||||
@ -106,10 +193,7 @@ def init_daemon(config_options):
|
||||
print_msg("Type 'electrum create' to create a new wallet, or provide a path to a wallet with the -w option")
|
||||
sys.exit(0)
|
||||
if storage.is_encrypted():
|
||||
if storage.is_encrypted_with_hw_device():
|
||||
plugins = init_plugins(config, 'cmdline')
|
||||
password = get_password_for_hw_device_encrypted_storage(plugins)
|
||||
elif config.get('password'):
|
||||
if config.get('password'):
|
||||
password = config.get('password')
|
||||
else:
|
||||
password = prompt_password('Password:', False)
|
||||
@ -136,7 +220,7 @@ def init_cmdline(config_options, server):
|
||||
if cmdname in ['payto', 'paytomany'] and config.get('broadcast'):
|
||||
cmd.requires_network = True
|
||||
|
||||
# instantiate wallet for command-line
|
||||
# instanciate wallet for command-line
|
||||
storage = WalletStorage(config.get_wallet_path())
|
||||
|
||||
if cmd.requires_wallet and not storage.file_exists():
|
||||
@ -153,10 +237,7 @@ def init_cmdline(config_options, server):
|
||||
# commands needing password
|
||||
if (cmd.requires_wallet and storage.is_encrypted() and server is None)\
|
||||
or (cmd.requires_password and (storage.get('use_encryption') or storage.is_encrypted())):
|
||||
if storage.is_encrypted_with_hw_device():
|
||||
# this case is handled later in the control flow
|
||||
password = None
|
||||
elif config.get('password'):
|
||||
if config.get('password'):
|
||||
password = config.get('password')
|
||||
else:
|
||||
password = prompt_password('Password:', False)
|
||||
@ -166,118 +247,65 @@ def init_cmdline(config_options, server):
|
||||
else:
|
||||
password = None
|
||||
|
||||
config_options['password'] = config_options.get('password') or password
|
||||
config_options['password'] = password
|
||||
|
||||
if cmd.name == 'password':
|
||||
new_password = prompt_password('New password:')
|
||||
config_options['new_password'] = new_password
|
||||
|
||||
|
||||
def get_connected_hw_devices(plugins):
|
||||
supported_plugins = plugins.get_hardware_support()
|
||||
# scan devices
|
||||
devices = []
|
||||
devmgr = plugins.device_manager
|
||||
for splugin in supported_plugins:
|
||||
name, plugin = splugin.name, splugin.plugin
|
||||
if not plugin:
|
||||
e = splugin.exception
|
||||
print_stderr(f"{name}: error during plugin init: {repr(e)}")
|
||||
continue
|
||||
try:
|
||||
u = devmgr.unpaired_device_infos(None, plugin)
|
||||
except:
|
||||
devmgr.print_error(f'error getting device infos for {name}: {e}')
|
||||
continue
|
||||
devices += list(map(lambda x: (name, x), u))
|
||||
return devices
|
||||
return cmd, password
|
||||
|
||||
|
||||
def get_password_for_hw_device_encrypted_storage(plugins):
|
||||
devices = get_connected_hw_devices(plugins)
|
||||
if len(devices) == 0:
|
||||
print_msg("Error: No connected hw device found. Cannot decrypt this wallet.")
|
||||
sys.exit(1)
|
||||
elif len(devices) > 1:
|
||||
print_msg("Warning: multiple hardware devices detected. "
|
||||
"The first one will be used to decrypt the wallet.")
|
||||
# FIXME we use the "first" device, in case of multiple ones
|
||||
name, device_info = devices[0]
|
||||
plugin = plugins.get_plugin(name)
|
||||
derivation = get_derivation_used_for_hw_device_encryption()
|
||||
try:
|
||||
xpub = plugin.get_xpub(device_info.device.id_, derivation, 'standard', plugin.handler)
|
||||
except UserCancelled:
|
||||
sys.exit(0)
|
||||
password = keystore.Xpub.get_pubkey_from_xpub(xpub, ())
|
||||
return password
|
||||
|
||||
|
||||
def run_offline_command(config, config_options, plugins):
|
||||
def run_offline_command(config, config_options):
|
||||
cmdname = config.get('cmd')
|
||||
cmd = known_commands[cmdname]
|
||||
password = config_options.get('password')
|
||||
if cmd.requires_wallet:
|
||||
storage = WalletStorage(config.get_wallet_path())
|
||||
if storage.is_encrypted():
|
||||
if storage.is_encrypted_with_hw_device():
|
||||
password = get_password_for_hw_device_encrypted_storage(plugins)
|
||||
config_options['password'] = password
|
||||
storage.decrypt(password)
|
||||
wallet = Wallet(storage)
|
||||
else:
|
||||
wallet = None
|
||||
# check password
|
||||
if cmd.requires_password and wallet.has_password():
|
||||
if cmd.requires_password and storage.get('use_encryption'):
|
||||
try:
|
||||
wallet.check_password(password)
|
||||
seed = wallet.check_password(password)
|
||||
except InvalidPassword:
|
||||
print_msg("Error: This password does not decode this wallet.")
|
||||
sys.exit(1)
|
||||
if cmd.requires_network:
|
||||
print_msg("Warning: running command offline")
|
||||
# arguments passed to function
|
||||
args = [config.get(x) for x in cmd.params]
|
||||
args = map(lambda x: config.get(x), cmd.params)
|
||||
# decode json arguments
|
||||
if cmdname not in ('setconfig',):
|
||||
args = list(map(json_decode, args))
|
||||
args = map(json_decode, args)
|
||||
# options
|
||||
kwargs = {}
|
||||
for x in cmd.options:
|
||||
kwargs[x] = (config_options.get(x) if x in ['password', 'new_password'] else config.get(x))
|
||||
cmd_runner = Commands(config, wallet, None)
|
||||
args += map(lambda x: config.get(x), cmd.options)
|
||||
cmd_runner = Commands(config, wallet, None,
|
||||
password=config_options.get('password'),
|
||||
new_password=config_options.get('new_password'))
|
||||
func = getattr(cmd_runner, cmd.name)
|
||||
result = func(*args, **kwargs)
|
||||
result = func(*args)
|
||||
# save wallet
|
||||
if wallet:
|
||||
wallet.storage.write()
|
||||
return result
|
||||
|
||||
|
||||
def init_plugins(config, gui_name):
|
||||
from electrum.plugin import Plugins
|
||||
return Plugins(config, gui_name)
|
||||
|
||||
from electrum.plugins import Plugins
|
||||
return Plugins(config, is_bundle or is_local or is_android, gui_name)
|
||||
|
||||
if __name__ == '__main__':
|
||||
# The hook will only be used in the Qt GUI right now
|
||||
util.setup_thread_excepthook()
|
||||
# on macOS, delete Process Serial Number arg generated for apps launched in Finder
|
||||
sys.argv = list(filter(lambda x: not x.startswith('-psn'), sys.argv))
|
||||
|
||||
# on osx, delete Process Serial Number arg generated for apps launched in Finder
|
||||
sys.argv = filter(lambda x: not x.startswith('-psn'), sys.argv)
|
||||
|
||||
# old 'help' syntax
|
||||
if len(sys.argv) > 1 and sys.argv[1] == 'help':
|
||||
if len(sys.argv)>1 and sys.argv[1] == 'help':
|
||||
sys.argv.remove('help')
|
||||
sys.argv.append('-h')
|
||||
|
||||
# old '-v' syntax
|
||||
try:
|
||||
i = sys.argv.index('-v')
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
sys.argv[i] = '-v*'
|
||||
|
||||
# read arguments from stdin pipe and prompt
|
||||
for i, arg in enumerate(sys.argv):
|
||||
if arg == '-':
|
||||
@ -285,9 +313,9 @@ if __name__ == '__main__':
|
||||
sys.argv[i] = sys.stdin.read()
|
||||
break
|
||||
else:
|
||||
raise Exception('Cannot get argument from stdin')
|
||||
raise BaseException('Cannot get argument from stdin')
|
||||
elif arg == '?':
|
||||
sys.argv[i] = input("Enter argument:")
|
||||
sys.argv[i] = raw_input("Enter argument:")
|
||||
elif arg == ':':
|
||||
sys.argv[i] = prompt_password('Enter argument (will not echo):', False)
|
||||
|
||||
@ -298,28 +326,25 @@ if __name__ == '__main__':
|
||||
# config is an object passed to the various constructors (wallet, interface, gui)
|
||||
if is_android:
|
||||
config_options = {
|
||||
'verbosity': '',
|
||||
'verbose': True,
|
||||
'cmd': 'gui',
|
||||
'gui': 'kivy',
|
||||
}
|
||||
else:
|
||||
config_options = args.__dict__
|
||||
f = lambda key: config_options[key] is not None and key not in config_variables.get(args.cmd, {}).keys()
|
||||
config_options = {key: config_options[key] for key in filter(f, config_options.keys())}
|
||||
for k, v in config_options.items():
|
||||
if v is None or (k in config_variables.get(args.cmd, {}).keys()):
|
||||
config_options.pop(k)
|
||||
if config_options.get('server'):
|
||||
config_options['auto_connect'] = False
|
||||
|
||||
config_options['cwd'] = os.getcwd()
|
||||
|
||||
# fixme: this can probably be achieved with a runtime hook (pyinstaller)
|
||||
if is_bundle and os.path.exists(os.path.join(sys._MEIPASS, 'is_portable')):
|
||||
config_options['portable'] = True
|
||||
|
||||
if config_options.get('portable'):
|
||||
config_options['electrum_path'] = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'electrum_data')
|
||||
|
||||
# kivy sometimes freezes when we write to sys.stderr
|
||||
set_verbosity(config_options.get('verbosity') if config_options.get('gui') != 'kivy' else '')
|
||||
set_verbosity(config_options.get('verbose') and config_options.get('gui')!='kivy')
|
||||
|
||||
# check uri
|
||||
uri = config_options.get('url')
|
||||
@ -334,17 +359,24 @@ if __name__ == '__main__':
|
||||
cmdname = config.get('cmd')
|
||||
|
||||
if config.get('testnet'):
|
||||
constants.set_testnet()
|
||||
elif config.get('regtest'):
|
||||
constants.set_regtest()
|
||||
elif config.get('simnet'):
|
||||
constants.set_simnet()
|
||||
bitcoin.set_testnet()
|
||||
network.set_testnet()
|
||||
|
||||
if config.get('nolnet'):
|
||||
bitcoin.set_nolnet()
|
||||
network.set_nolnet()
|
||||
|
||||
# run non-RPC commands separately
|
||||
if cmdname in ['create', 'restore']:
|
||||
run_non_RPC(config)
|
||||
sys.exit(0)
|
||||
|
||||
if cmdname == 'gui':
|
||||
fd, server = daemon.get_fd_or_server(config)
|
||||
if fd is not None:
|
||||
plugins = init_plugins(config, config.get('gui', 'qt'))
|
||||
d = daemon.Daemon(config, fd)
|
||||
d.start()
|
||||
d.init_gui(config, plugins)
|
||||
sys.exit(0)
|
||||
else:
|
||||
@ -365,15 +397,12 @@ if __name__ == '__main__':
|
||||
sys.exit(0)
|
||||
init_plugins(config, 'cmdline')
|
||||
d = daemon.Daemon(config, fd)
|
||||
d.start()
|
||||
if config.get('websocket_server'):
|
||||
from electrum import websockets
|
||||
websockets.WebSocketServer(config, d.network)
|
||||
websockets.WebSocketServer(config, d.network).start()
|
||||
if config.get('requests_dir'):
|
||||
path = os.path.join(config.get('requests_dir'), 'index.html')
|
||||
if not os.path.exists(path):
|
||||
print("Requests directory not configured.")
|
||||
print("You can configure it using https://github.com/spesmilo/electrum-merchant")
|
||||
sys.exit(1)
|
||||
check_www_dir(config.get('requests_dir'))
|
||||
d.join()
|
||||
sys.exit(0)
|
||||
else:
|
||||
@ -397,10 +426,11 @@ if __name__ == '__main__':
|
||||
print_msg("Daemon not running; try 'electrum daemon start'")
|
||||
sys.exit(1)
|
||||
else:
|
||||
plugins = init_plugins(config, 'cmdline')
|
||||
result = run_offline_command(config, config_options, plugins)
|
||||
# print result
|
||||
if isinstance(result, str):
|
||||
init_plugins(config, 'cmdline')
|
||||
result = run_offline_command(config, config_options)
|
||||
|
||||
# print result
|
||||
if type(result) in [str, unicode]:
|
||||
print_msg(result)
|
||||
elif type(result) is dict and result.get('error'):
|
||||
print_stderr(result.get('error'))
|
||||
11
electrum-env
Executable file → Normal file
@ -9,19 +9,16 @@
|
||||
# python-qt and its dependencies will still need to be installed with
|
||||
# your package manager.
|
||||
|
||||
PYTHON_VER="$(python3 -c 'import sys; print(sys.version[:3])')"
|
||||
|
||||
cd $(dirname $0)
|
||||
if [ -e ./env/bin/activate ]; then
|
||||
source ./env/bin/activate
|
||||
else
|
||||
virtualenv env -p `which python3`
|
||||
virtualenv env
|
||||
source ./env/bin/activate
|
||||
python3 -m pip install .[fast]
|
||||
python setup.py install
|
||||
fi
|
||||
|
||||
export PYTHONPATH="/usr/local/lib/python${PYTHON_VER}/site-packages:$PYTHONPATH"
|
||||
export PYTHONPATH="/usr/local/lib/python2.7/site-packages:$PYTHONPATH"
|
||||
|
||||
./run_electrum "$@"
|
||||
./electrum "$@"
|
||||
|
||||
deactivate
|
||||
|
||||
16
electrum.conf.sample
Normal file
@ -0,0 +1,16 @@
|
||||
# Configuration file for the electrum client
|
||||
# Settings defined here are shared across wallets
|
||||
#
|
||||
# copy this file to /etc/electrum.conf if you want read-only settings
|
||||
|
||||
[client]
|
||||
server = electrum.novit.ro:50001:t
|
||||
proxy = None
|
||||
gap_limit = 5
|
||||
# booleans use python syntax
|
||||
use_change = True
|
||||
gui = qt
|
||||
num_zeros = 2
|
||||
# default transaction fee is in Satoshis
|
||||
fee = 10000
|
||||
winpos-qt = [799, 226, 877, 435]
|
||||
@ -1,22 +1,17 @@
|
||||
# If you want Electrum to appear in a Linux app launcher ("start menu"), install this by doing:
|
||||
# If you want electrum to appear in a linux app launcher ("start menu"), install this by doing:
|
||||
# sudo desktop-file-install electrum.desktop
|
||||
|
||||
[Desktop Entry]
|
||||
Comment=Lightweight Bitcoin Client
|
||||
Exec=sh -c "PATH=\"\\$HOME/.local/bin:\\$PATH\"; electrum %u"
|
||||
Exec=electrum %u
|
||||
GenericName[en_US]=Bitcoin Wallet
|
||||
GenericName=Bitcoin Wallet
|
||||
Icon=electrum
|
||||
Name[en_US]=Electrum Bitcoin Wallet
|
||||
Name=Electrum Bitcoin Wallet
|
||||
Categories=Finance;Network;
|
||||
StartupNotify=true
|
||||
StartupWMClass=electrum
|
||||
StartupNotify=false
|
||||
Terminal=false
|
||||
Type=Application
|
||||
MimeType=x-scheme-handler/bitcoin;
|
||||
Actions=Testnet;
|
||||
|
||||
[Desktop Action Testnet]
|
||||
Exec=sh -c "PATH=\"\\$HOME/.local/bin:\\$PATH\"; electrum --testnet %u"
|
||||
Name=Testnet mode
|
||||
|
||||
BIN
electrum.icns
Normal file
@ -1,17 +0,0 @@
|
||||
from .version import ELECTRUM_VERSION
|
||||
from .util import format_satoshis, print_msg, print_error, set_verbosity
|
||||
from .wallet import Wallet
|
||||
from .storage import WalletStorage
|
||||
from .coinchooser import COIN_CHOOSERS
|
||||
from .network import Network, pick_random_server
|
||||
from .interface import Interface
|
||||
from .simple_config import SimpleConfig, get_config, set_config
|
||||
from . import bitcoin
|
||||
from . import transaction
|
||||
from . import daemon
|
||||
from .transaction import Transaction
|
||||
from .plugin import BasePlugin
|
||||
from .commands import Commands, known_commands
|
||||
|
||||
|
||||
__version__ = ELECTRUM_VERSION
|
||||
@ -1,877 +0,0 @@
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2018 The Electrum Developers
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import threading
|
||||
import asyncio
|
||||
import itertools
|
||||
from collections import defaultdict
|
||||
from typing import TYPE_CHECKING, Dict, Optional
|
||||
|
||||
from . import bitcoin
|
||||
from .bitcoin import COINBASE_MATURITY, TYPE_ADDRESS, TYPE_PUBKEY
|
||||
from .util import PrintError, profiler, bfh, TxMinedInfo
|
||||
from .transaction import Transaction, TxOutput
|
||||
from .synchronizer import Synchronizer
|
||||
from .verifier import SPV
|
||||
from .blockchain import hash_header
|
||||
from .i18n import _
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .storage import WalletStorage
|
||||
from .network import Network
|
||||
|
||||
|
||||
TX_HEIGHT_LOCAL = -2
|
||||
TX_HEIGHT_UNCONF_PARENT = -1
|
||||
TX_HEIGHT_UNCONFIRMED = 0
|
||||
|
||||
class AddTransactionException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class UnrelatedTransactionException(AddTransactionException):
|
||||
def __str__(self):
|
||||
return _("Transaction is unrelated to this wallet.")
|
||||
|
||||
|
||||
class AddressSynchronizer(PrintError):
|
||||
"""
|
||||
inherited by wallet
|
||||
"""
|
||||
|
||||
def __init__(self, storage: 'WalletStorage'):
|
||||
self.storage = storage
|
||||
self.network = None # type: Network
|
||||
# verifier (SPV) and synchronizer are started in start_network
|
||||
self.synchronizer = None # type: Synchronizer
|
||||
self.verifier = None # type: SPV
|
||||
# locks: if you need to take multiple ones, acquire them in the order they are defined here!
|
||||
self.lock = threading.RLock()
|
||||
self.transaction_lock = threading.RLock()
|
||||
# address -> list(txid, height)
|
||||
self.history = storage.get('addr_history',{})
|
||||
# Verified transactions. txid -> TxMinedInfo. Access with self.lock.
|
||||
verified_tx = storage.get('verified_tx3', {})
|
||||
self.verified_tx = {} # type: Dict[str, TxMinedInfo]
|
||||
for txid, (height, timestamp, txpos, header_hash, flodata) in verified_tx.items():
|
||||
self.verified_tx[txid] = TxMinedInfo(height=height,
|
||||
conf=None,
|
||||
timestamp=timestamp,
|
||||
txpos=txpos,
|
||||
header_hash=header_hash,
|
||||
flodata=flodata)
|
||||
# Transactions pending verification. txid -> tx_height. Access with self.lock.
|
||||
self.unverified_tx = defaultdict(int)
|
||||
# true when synchronized
|
||||
self.up_to_date = False
|
||||
# thread local storage for caching stuff
|
||||
self.threadlocal_cache = threading.local()
|
||||
|
||||
self.load_and_cleanup()
|
||||
|
||||
def with_transaction_lock(func):
|
||||
def func_wrapper(self, *args, **kwargs):
|
||||
with self.transaction_lock:
|
||||
return func(self, *args, **kwargs)
|
||||
return func_wrapper
|
||||
|
||||
def load_and_cleanup(self):
|
||||
self.load_transactions()
|
||||
self.load_local_history()
|
||||
self.check_history()
|
||||
self.load_unverified_transactions()
|
||||
self.remove_local_transactions_we_dont_have()
|
||||
|
||||
def is_mine(self, address):
|
||||
return address in self.history
|
||||
|
||||
def get_addresses(self):
|
||||
return sorted(self.history.keys())
|
||||
|
||||
def get_address_history(self, addr):
|
||||
h = []
|
||||
# we need self.transaction_lock but get_tx_height will take self.lock
|
||||
# so we need to take that too here, to enforce order of locks
|
||||
with self.lock, self.transaction_lock:
|
||||
related_txns = self._history_local.get(addr, set())
|
||||
for tx_hash in related_txns:
|
||||
tx_height = self.get_tx_height(tx_hash).height
|
||||
h.append((tx_hash, tx_height))
|
||||
return h
|
||||
|
||||
def get_address_history_len(self, addr: str) -> int:
|
||||
"""Return number of transactions where address is involved."""
|
||||
return len(self._history_local.get(addr, ()))
|
||||
|
||||
def get_txin_address(self, txi):
|
||||
addr = txi.get('address')
|
||||
if addr and addr != "(pubkey)":
|
||||
return addr
|
||||
prevout_hash = txi.get('prevout_hash')
|
||||
prevout_n = txi.get('prevout_n')
|
||||
dd = self.txo.get(prevout_hash, {})
|
||||
for addr, l in dd.items():
|
||||
for n, v, is_cb in l:
|
||||
if n == prevout_n:
|
||||
return addr
|
||||
return None
|
||||
|
||||
def get_txout_address(self, txo: TxOutput):
|
||||
if txo.type == TYPE_ADDRESS:
|
||||
addr = txo.address
|
||||
elif txo.type == TYPE_PUBKEY:
|
||||
addr = bitcoin.public_key_to_p2pkh(bfh(txo.address))
|
||||
else:
|
||||
addr = None
|
||||
return addr
|
||||
|
||||
def load_unverified_transactions(self):
|
||||
# review transactions that are in the history
|
||||
for addr, hist in self.history.items():
|
||||
for tx_hash, tx_height in hist:
|
||||
# add it in case it was previously unconfirmed
|
||||
self.add_unverified_tx(tx_hash, tx_height)
|
||||
|
||||
def start_network(self, network):
|
||||
self.network = network
|
||||
if self.network is not None:
|
||||
self.synchronizer = Synchronizer(self)
|
||||
self.verifier = SPV(self.network, self)
|
||||
|
||||
def stop_threads(self, write_to_disk=True):
|
||||
if self.network:
|
||||
if self.synchronizer:
|
||||
asyncio.run_coroutine_threadsafe(self.synchronizer.stop(), self.network.asyncio_loop)
|
||||
self.synchronizer = None
|
||||
if self.verifier:
|
||||
asyncio.run_coroutine_threadsafe(self.verifier.stop(), self.network.asyncio_loop)
|
||||
self.verifier = None
|
||||
self.storage.put('stored_height', self.get_local_height())
|
||||
if write_to_disk:
|
||||
self.save_transactions()
|
||||
self.save_verified_tx()
|
||||
self.storage.write()
|
||||
|
||||
def add_address(self, address):
|
||||
if address not in self.history:
|
||||
self.history[address] = []
|
||||
self.set_up_to_date(False)
|
||||
if self.synchronizer:
|
||||
self.synchronizer.add(address)
|
||||
|
||||
def get_conflicting_transactions(self, tx_hash, tx):
|
||||
"""Returns a set of transaction hashes from the wallet history that are
|
||||
directly conflicting with tx, i.e. they have common outpoints being
|
||||
spent with tx. If the tx is already in wallet history, that will not be
|
||||
reported as a conflict.
|
||||
"""
|
||||
conflicting_txns = set()
|
||||
with self.transaction_lock:
|
||||
for txin in tx.inputs():
|
||||
if txin['type'] == 'coinbase':
|
||||
continue
|
||||
prevout_hash = txin['prevout_hash']
|
||||
prevout_n = txin['prevout_n']
|
||||
spending_tx_hash = self.spent_outpoints[prevout_hash].get(prevout_n)
|
||||
if spending_tx_hash is None:
|
||||
continue
|
||||
# this outpoint has already been spent, by spending_tx
|
||||
assert spending_tx_hash in self.transactions
|
||||
conflicting_txns |= {spending_tx_hash}
|
||||
if tx_hash in conflicting_txns:
|
||||
# this tx is already in history, so it conflicts with itself
|
||||
if len(conflicting_txns) > 1:
|
||||
raise Exception('Found conflicting transactions already in wallet history.')
|
||||
conflicting_txns -= {tx_hash}
|
||||
return conflicting_txns
|
||||
|
||||
def add_transaction(self, tx_hash, tx, allow_unrelated=False):
|
||||
assert tx_hash, tx_hash
|
||||
assert tx, tx
|
||||
assert tx.is_complete()
|
||||
# assert tx_hash == tx.txid() # disabled as expensive; test done by Synchronizer.
|
||||
# we need self.transaction_lock but get_tx_height will take self.lock
|
||||
# so we need to take that too here, to enforce order of locks
|
||||
with self.lock, self.transaction_lock:
|
||||
# NOTE: returning if tx in self.transactions might seem like a good idea
|
||||
# BUT we track is_mine inputs in a txn, and during subsequent calls
|
||||
# of add_transaction tx, we might learn of more-and-more inputs of
|
||||
# being is_mine, as we roll the gap_limit forward
|
||||
is_coinbase = tx.inputs()[0]['type'] == 'coinbase'
|
||||
tx_height = self.get_tx_height(tx_hash).height
|
||||
if not allow_unrelated:
|
||||
# note that during sync, if the transactions are not properly sorted,
|
||||
# it could happen that we think tx is unrelated but actually one of the inputs is is_mine.
|
||||
# this is the main motivation for allow_unrelated
|
||||
is_mine = any([self.is_mine(self.get_txin_address(txin)) for txin in tx.inputs()])
|
||||
is_for_me = any([self.is_mine(self.get_txout_address(txo)) for txo in tx.outputs()])
|
||||
if not is_mine and not is_for_me:
|
||||
raise UnrelatedTransactionException()
|
||||
# Find all conflicting transactions.
|
||||
# In case of a conflict,
|
||||
# 1. confirmed > mempool > local
|
||||
# 2. this new txn has priority over existing ones
|
||||
# When this method exits, there must NOT be any conflict, so
|
||||
# either keep this txn and remove all conflicting (along with dependencies)
|
||||
# or drop this txn
|
||||
conflicting_txns = self.get_conflicting_transactions(tx_hash, tx)
|
||||
if conflicting_txns:
|
||||
existing_mempool_txn = any(
|
||||
self.get_tx_height(tx_hash2).height in (TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_UNCONF_PARENT)
|
||||
for tx_hash2 in conflicting_txns)
|
||||
existing_confirmed_txn = any(
|
||||
self.get_tx_height(tx_hash2).height > 0
|
||||
for tx_hash2 in conflicting_txns)
|
||||
if existing_confirmed_txn and tx_height <= 0:
|
||||
# this is a non-confirmed tx that conflicts with confirmed txns; drop.
|
||||
return False
|
||||
if existing_mempool_txn and tx_height == TX_HEIGHT_LOCAL:
|
||||
# this is a local tx that conflicts with non-local txns; drop.
|
||||
return False
|
||||
# keep this txn and remove all conflicting
|
||||
to_remove = set()
|
||||
to_remove |= conflicting_txns
|
||||
for conflicting_tx_hash in conflicting_txns:
|
||||
to_remove |= self.get_depending_transactions(conflicting_tx_hash)
|
||||
for tx_hash2 in to_remove:
|
||||
self.remove_transaction(tx_hash2)
|
||||
# add inputs
|
||||
def add_value_from_prev_output():
|
||||
dd = self.txo.get(prevout_hash, {})
|
||||
# note: this nested loop takes linear time in num is_mine outputs of prev_tx
|
||||
for addr, outputs in dd.items():
|
||||
# note: instead of [(n, v, is_cb), ...]; we could store: {n -> (v, is_cb)}
|
||||
for n, v, is_cb in outputs:
|
||||
if n == prevout_n:
|
||||
if addr and self.is_mine(addr):
|
||||
if d.get(addr) is None:
|
||||
d[addr] = set()
|
||||
d[addr].add((ser, v))
|
||||
return
|
||||
self.txi[tx_hash] = d = {}
|
||||
for txi in tx.inputs():
|
||||
if txi['type'] == 'coinbase':
|
||||
continue
|
||||
prevout_hash = txi['prevout_hash']
|
||||
prevout_n = txi['prevout_n']
|
||||
ser = prevout_hash + ':%d' % prevout_n
|
||||
self.spent_outpoints[prevout_hash][prevout_n] = tx_hash
|
||||
add_value_from_prev_output()
|
||||
# add outputs
|
||||
self.txo[tx_hash] = d = {}
|
||||
for n, txo in enumerate(tx.outputs()):
|
||||
v = txo[2]
|
||||
ser = tx_hash + ':%d'%n
|
||||
addr = self.get_txout_address(txo)
|
||||
if addr and self.is_mine(addr):
|
||||
if d.get(addr) is None:
|
||||
d[addr] = []
|
||||
d[addr].append((n, v, is_coinbase))
|
||||
# give v to txi that spends me
|
||||
next_tx = self.spent_outpoints[tx_hash].get(n)
|
||||
if next_tx is not None:
|
||||
dd = self.txi.get(next_tx, {})
|
||||
if dd.get(addr) is None:
|
||||
dd[addr] = set()
|
||||
if (ser, v) not in dd[addr]:
|
||||
dd[addr].add((ser, v))
|
||||
self._add_tx_to_local_history(next_tx)
|
||||
# add to local history
|
||||
self._add_tx_to_local_history(tx_hash)
|
||||
# save
|
||||
self.transactions[tx_hash] = tx
|
||||
return True
|
||||
|
||||
def remove_transaction(self, tx_hash):
|
||||
def remove_from_spent_outpoints():
|
||||
# undo spends in spent_outpoints
|
||||
if tx is not None: # if we have the tx, this branch is faster
|
||||
for txin in tx.inputs():
|
||||
if txin['type'] == 'coinbase':
|
||||
continue
|
||||
prevout_hash = txin['prevout_hash']
|
||||
prevout_n = txin['prevout_n']
|
||||
self.spent_outpoints[prevout_hash].pop(prevout_n, None)
|
||||
if not self.spent_outpoints[prevout_hash]:
|
||||
self.spent_outpoints.pop(prevout_hash)
|
||||
else: # expensive but always works
|
||||
for prevout_hash, d in list(self.spent_outpoints.items()):
|
||||
for prevout_n, spending_txid in d.items():
|
||||
if spending_txid == tx_hash:
|
||||
self.spent_outpoints[prevout_hash].pop(prevout_n, None)
|
||||
if not self.spent_outpoints[prevout_hash]:
|
||||
self.spent_outpoints.pop(prevout_hash)
|
||||
# Remove this tx itself; if nothing spends from it.
|
||||
# It is not so clear what to do if other txns spend from it, but it will be
|
||||
# removed when those other txns are removed.
|
||||
if not self.spent_outpoints[tx_hash]:
|
||||
self.spent_outpoints.pop(tx_hash)
|
||||
|
||||
with self.transaction_lock:
|
||||
self.print_error("removing tx from history", tx_hash)
|
||||
tx = self.transactions.pop(tx_hash, None)
|
||||
remove_from_spent_outpoints()
|
||||
self._remove_tx_from_local_history(tx_hash)
|
||||
self.txi.pop(tx_hash, None)
|
||||
self.txo.pop(tx_hash, None)
|
||||
|
||||
def get_depending_transactions(self, tx_hash):
|
||||
"""Returns all (grand-)children of tx_hash in this wallet."""
|
||||
children = set()
|
||||
for other_hash in self.spent_outpoints[tx_hash].values():
|
||||
children.add(other_hash)
|
||||
children |= self.get_depending_transactions(other_hash)
|
||||
return children
|
||||
|
||||
def receive_tx_callback(self, tx_hash, tx, tx_height):
|
||||
self.add_unverified_tx(tx_hash, tx_height)
|
||||
self.add_transaction(tx_hash, tx, allow_unrelated=True)
|
||||
|
||||
def receive_history_callback(self, addr, hist, tx_fees):
|
||||
with self.lock:
|
||||
old_hist = self.get_address_history(addr)
|
||||
for tx_hash, height in old_hist:
|
||||
if (tx_hash, height) not in hist:
|
||||
# make tx local
|
||||
self.unverified_tx.pop(tx_hash, None)
|
||||
self.verified_tx.pop(tx_hash, None)
|
||||
if self.verifier:
|
||||
self.verifier.remove_spv_proof_for_tx(tx_hash)
|
||||
self.history[addr] = hist
|
||||
|
||||
for tx_hash, tx_height in hist:
|
||||
# add it in case it was previously unconfirmed
|
||||
self.add_unverified_tx(tx_hash, tx_height)
|
||||
# if addr is new, we have to recompute txi and txo
|
||||
tx = self.transactions.get(tx_hash)
|
||||
if tx is None:
|
||||
continue
|
||||
self.add_transaction(tx_hash, tx, allow_unrelated=True)
|
||||
|
||||
# Store fees
|
||||
self.tx_fees.update(tx_fees)
|
||||
|
||||
@profiler
|
||||
def load_transactions(self):
|
||||
# load txi, txo, tx_fees
|
||||
# bookkeeping data of is_mine inputs of transactions
|
||||
self.txi = self.storage.get('txi', {}) # txid -> address -> (prev_outpoint, value)
|
||||
for txid, d in list(self.txi.items()):
|
||||
for addr, lst in d.items():
|
||||
self.txi[txid][addr] = set([tuple(x) for x in lst])
|
||||
# bookkeeping data of is_mine outputs of transactions
|
||||
self.txo = self.storage.get('txo', {}) # txid -> address -> (output_index, value, is_coinbase)
|
||||
self.tx_fees = self.storage.get('tx_fees', {})
|
||||
tx_list = self.storage.get('transactions', {})
|
||||
# load transactions
|
||||
self.transactions = {}
|
||||
for tx_hash, raw in tx_list.items():
|
||||
tx = Transaction(raw)
|
||||
self.transactions[tx_hash] = tx
|
||||
if self.txi.get(tx_hash) is None and self.txo.get(tx_hash) is None:
|
||||
self.print_error("removing unreferenced tx", tx_hash)
|
||||
self.transactions.pop(tx_hash)
|
||||
# load spent_outpoints
|
||||
_spent_outpoints = self.storage.get('spent_outpoints', {})
|
||||
self.spent_outpoints = defaultdict(dict)
|
||||
for prevout_hash, d in _spent_outpoints.items():
|
||||
for prevout_n_str, spending_txid in d.items():
|
||||
prevout_n = int(prevout_n_str)
|
||||
if spending_txid not in self.transactions:
|
||||
continue # only care about txns we have
|
||||
self.spent_outpoints[prevout_hash][prevout_n] = spending_txid
|
||||
|
||||
@profiler
|
||||
def load_local_history(self):
|
||||
self._history_local = {} # address -> set(txid)
|
||||
self._address_history_changed_events = defaultdict(asyncio.Event) # address -> Event
|
||||
for txid in itertools.chain(self.txi, self.txo):
|
||||
self._add_tx_to_local_history(txid)
|
||||
|
||||
@profiler
|
||||
def check_history(self):
|
||||
save = False
|
||||
hist_addrs_mine = list(filter(lambda k: self.is_mine(k), self.history.keys()))
|
||||
hist_addrs_not_mine = list(filter(lambda k: not self.is_mine(k), self.history.keys()))
|
||||
for addr in hist_addrs_not_mine:
|
||||
self.history.pop(addr)
|
||||
save = True
|
||||
for addr in hist_addrs_mine:
|
||||
hist = self.history[addr]
|
||||
for tx_hash, tx_height in hist:
|
||||
if self.txi.get(tx_hash) or self.txo.get(tx_hash):
|
||||
continue
|
||||
tx = self.transactions.get(tx_hash)
|
||||
if tx is not None:
|
||||
self.add_transaction(tx_hash, tx, allow_unrelated=True)
|
||||
save = True
|
||||
if save:
|
||||
self.save_transactions()
|
||||
|
||||
def remove_local_transactions_we_dont_have(self):
|
||||
txid_set = set(self.txi) | set(self.txo)
|
||||
for txid in txid_set:
|
||||
tx_height = self.get_tx_height(txid).height
|
||||
if tx_height == TX_HEIGHT_LOCAL and txid not in self.transactions:
|
||||
self.remove_transaction(txid)
|
||||
|
||||
@profiler
|
||||
def save_transactions(self, write=False):
|
||||
with self.transaction_lock:
|
||||
tx = {}
|
||||
for k,v in self.transactions.items():
|
||||
tx[k] = str(v)
|
||||
self.storage.put('transactions', tx)
|
||||
self.storage.put('txi', self.txi)
|
||||
self.storage.put('txo', self.txo)
|
||||
self.storage.put('tx_fees', self.tx_fees)
|
||||
self.storage.put('addr_history', self.history)
|
||||
self.storage.put('spent_outpoints', self.spent_outpoints)
|
||||
if write:
|
||||
self.storage.write()
|
||||
|
||||
def save_verified_tx(self, write=False):
|
||||
with self.lock:
|
||||
verified_tx_to_save = {}
|
||||
for txid, tx_info in self.verified_tx.items():
|
||||
verified_tx_to_save[txid] = (tx_info.height, tx_info.timestamp,
|
||||
tx_info.txpos, tx_info.header_hash, tx_info.flodata)
|
||||
self.storage.put('verified_tx3', verified_tx_to_save)
|
||||
if write:
|
||||
self.storage.write()
|
||||
|
||||
def clear_history(self):
|
||||
with self.lock:
|
||||
with self.transaction_lock:
|
||||
self.txi = {}
|
||||
self.txo = {}
|
||||
self.tx_fees = {}
|
||||
self.spent_outpoints = defaultdict(dict)
|
||||
self.history = {}
|
||||
self.verified_tx = {}
|
||||
self.transactions = {} # type: Dict[str, Transaction]
|
||||
self.save_transactions()
|
||||
|
||||
def get_txpos(self, tx_hash):
|
||||
"""Returns (height, txpos) tuple, even if the tx is unverified."""
|
||||
with self.lock:
|
||||
if tx_hash in self.verified_tx:
|
||||
info = self.verified_tx[tx_hash]
|
||||
return info.height, info.txpos
|
||||
elif tx_hash in self.unverified_tx:
|
||||
height = self.unverified_tx[tx_hash]
|
||||
return (height, 0) if height > 0 else ((1e9 - height), 0)
|
||||
else:
|
||||
return (1e9+1, 0)
|
||||
|
||||
def with_local_height_cached(func):
|
||||
# get local height only once, as it's relatively expensive.
|
||||
# take care that nested calls work as expected
|
||||
def f(self, *args, **kwargs):
|
||||
orig_val = getattr(self.threadlocal_cache, 'local_height', None)
|
||||
self.threadlocal_cache.local_height = orig_val or self.get_local_height()
|
||||
try:
|
||||
return func(self, *args, **kwargs)
|
||||
finally:
|
||||
self.threadlocal_cache.local_height = orig_val
|
||||
return f
|
||||
|
||||
@with_local_height_cached
|
||||
def get_history(self, domain=None):
|
||||
# get domain
|
||||
if domain is None:
|
||||
domain = self.history.keys()
|
||||
domain = set(domain)
|
||||
# 1. Get the history of each address in the domain, maintain the
|
||||
# delta of a tx as the sum of its deltas on domain addresses
|
||||
tx_deltas = defaultdict(int)
|
||||
for addr in domain:
|
||||
h = self.get_address_history(addr)
|
||||
for tx_hash, height in h:
|
||||
delta = self.get_tx_delta(tx_hash, addr)
|
||||
if delta is None or tx_deltas[tx_hash] is None:
|
||||
tx_deltas[tx_hash] = None
|
||||
else:
|
||||
tx_deltas[tx_hash] += delta
|
||||
# 2. create sorted history
|
||||
history = []
|
||||
for tx_hash in tx_deltas:
|
||||
delta = tx_deltas[tx_hash]
|
||||
tx_mined_status = self.get_tx_height(tx_hash)
|
||||
history.append((tx_hash, tx_mined_status, delta))
|
||||
history.sort(key = lambda x: self.get_txpos(x[0]), reverse=True)
|
||||
# 3. add balance
|
||||
c, u, x = self.get_balance(domain)
|
||||
balance = c + u + x
|
||||
h2 = []
|
||||
for tx_hash, tx_mined_status, delta in history:
|
||||
h2.append((tx_hash, tx_mined_status, delta, balance))
|
||||
if balance is None or delta is None:
|
||||
balance = None
|
||||
else:
|
||||
balance -= delta
|
||||
h2.reverse()
|
||||
# fixme: this may happen if history is incomplete
|
||||
if balance not in [None, 0]:
|
||||
self.print_error("Error: history not synchronized")
|
||||
return []
|
||||
|
||||
return h2
|
||||
|
||||
def _add_tx_to_local_history(self, txid):
|
||||
with self.transaction_lock:
|
||||
for addr in itertools.chain(self.txi.get(txid, []), self.txo.get(txid, [])):
|
||||
cur_hist = self._history_local.get(addr, set())
|
||||
cur_hist.add(txid)
|
||||
self._history_local[addr] = cur_hist
|
||||
self._mark_address_history_changed(addr)
|
||||
|
||||
def _remove_tx_from_local_history(self, txid):
|
||||
with self.transaction_lock:
|
||||
for addr in itertools.chain(self.txi.get(txid, []), self.txo.get(txid, [])):
|
||||
cur_hist = self._history_local.get(addr, set())
|
||||
try:
|
||||
cur_hist.remove(txid)
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
self._history_local[addr] = cur_hist
|
||||
|
||||
def _mark_address_history_changed(self, addr: str) -> None:
|
||||
# history for this address changed, wake up coroutines:
|
||||
self._address_history_changed_events[addr].set()
|
||||
# clear event immediately so that coroutines can wait() for the next change:
|
||||
self._address_history_changed_events[addr].clear()
|
||||
|
||||
async def wait_for_address_history_to_change(self, addr: str) -> None:
|
||||
"""Wait until the server tells us about a new transaction related to addr.
|
||||
|
||||
Unconfirmed and confirmed transactions are not distinguished, and so e.g. SPV
|
||||
is not taken into account.
|
||||
"""
|
||||
assert self.is_mine(addr), "address needs to be is_mine to be watched"
|
||||
await self._address_history_changed_events[addr].wait()
|
||||
|
||||
def add_unverified_tx(self, tx_hash, tx_height):
|
||||
if tx_hash in self.verified_tx:
|
||||
if tx_height in (TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_UNCONF_PARENT):
|
||||
with self.lock:
|
||||
self.verified_tx.pop(tx_hash)
|
||||
if self.verifier:
|
||||
self.verifier.remove_spv_proof_for_tx(tx_hash)
|
||||
else:
|
||||
with self.lock:
|
||||
# tx will be verified only if height > 0
|
||||
self.unverified_tx[tx_hash] = tx_height
|
||||
|
||||
def remove_unverified_tx(self, tx_hash, tx_height):
|
||||
with self.lock:
|
||||
new_height = self.unverified_tx.get(tx_hash)
|
||||
if new_height == tx_height:
|
||||
self.unverified_tx.pop(tx_hash, None)
|
||||
|
||||
def add_verified_tx(self, tx_hash: str, info: TxMinedInfo):
|
||||
# Remove from the unverified map and add to the verified map
|
||||
with self.lock:
|
||||
self.unverified_tx.pop(tx_hash, None)
|
||||
self.verified_tx[tx_hash] = info
|
||||
tx_mined_status = self.get_tx_height(tx_hash)
|
||||
self.network.trigger_callback('verified', self, tx_hash, tx_mined_status)
|
||||
|
||||
def get_unverified_txs(self):
|
||||
'''Returns a map from tx hash to transaction height'''
|
||||
with self.lock:
|
||||
return dict(self.unverified_tx) # copy
|
||||
|
||||
def undo_verifications(self, blockchain, height):
|
||||
'''Used by the verifier when a reorg has happened'''
|
||||
txs = set()
|
||||
with self.lock:
|
||||
for tx_hash, info in list(self.verified_tx.items()):
|
||||
tx_height = info.height
|
||||
if tx_height >= height:
|
||||
header = blockchain.read_header(tx_height)
|
||||
if not header or hash_header(header) != info.header_hash:
|
||||
self.verified_tx.pop(tx_hash, None)
|
||||
# NOTE: we should add these txns to self.unverified_tx,
|
||||
# but with what height?
|
||||
# If on the new fork after the reorg, the txn is at the
|
||||
# same height, we will not get a status update for the
|
||||
# address. If the txn is not mined or at a diff height,
|
||||
# we should get a status update. Unless we put tx into
|
||||
# unverified_tx, it will turn into local. So we put it
|
||||
# into unverified_tx with the old height, and if we get
|
||||
# a status update, that will overwrite it.
|
||||
self.unverified_tx[tx_hash] = tx_height
|
||||
txs.add(tx_hash)
|
||||
return txs
|
||||
|
||||
def get_local_height(self):
|
||||
""" return last known height if we are offline """
|
||||
cached_local_height = getattr(self.threadlocal_cache, 'local_height', None)
|
||||
if cached_local_height is not None:
|
||||
return cached_local_height
|
||||
return self.network.get_local_height() if self.network else self.storage.get('stored_height', 0)
|
||||
|
||||
def get_tx_height(self, tx_hash: str) -> TxMinedInfo:
|
||||
with self.lock:
|
||||
if tx_hash in self.verified_tx:
|
||||
info = self.verified_tx[tx_hash]
|
||||
conf = max(self.get_local_height() - info.height + 1, 0)
|
||||
return info._replace(conf=conf)
|
||||
elif tx_hash in self.unverified_tx:
|
||||
height = self.unverified_tx[tx_hash]
|
||||
return TxMinedInfo(height=height, conf=0)
|
||||
else:
|
||||
# local transaction
|
||||
return TxMinedInfo(height=TX_HEIGHT_LOCAL, conf=0)
|
||||
|
||||
def get_flodata(self, tx_hash: str):
|
||||
""" Given a transaction, returns flodata """
|
||||
with self.lock:
|
||||
if tx_hash in self.verified_tx:
|
||||
info = self.verified_tx[tx_hash]
|
||||
flodata = info[5]
|
||||
return flodata
|
||||
elif tx_hash in self.unverified_tx:
|
||||
tx = self.transactions.get(tx_hash)
|
||||
flodata = tx.flodata[5:]
|
||||
return flodata
|
||||
else:
|
||||
# local transaction
|
||||
tx = self.transactions.get(tx_hash)
|
||||
flodata = tx.flodata[5:]
|
||||
return flodata
|
||||
|
||||
def set_up_to_date(self, up_to_date):
|
||||
with self.lock:
|
||||
self.up_to_date = up_to_date
|
||||
if self.network:
|
||||
self.network.notify('status')
|
||||
if up_to_date:
|
||||
self.save_transactions(write=True)
|
||||
# if the verifier is also up to date, persist that too;
|
||||
# otherwise it will persist its results when it finishes
|
||||
if self.verifier and self.verifier.is_up_to_date():
|
||||
self.save_verified_tx(write=True)
|
||||
|
||||
def is_up_to_date(self):
|
||||
with self.lock: return self.up_to_date
|
||||
|
||||
@with_transaction_lock
|
||||
def get_tx_delta(self, tx_hash, address):
|
||||
"""effect of tx on address"""
|
||||
delta = 0
|
||||
# substract the value of coins sent from address
|
||||
d = self.txi.get(tx_hash, {}).get(address, [])
|
||||
for n, v in d:
|
||||
delta -= v
|
||||
# add the value of the coins received at address
|
||||
d = self.txo.get(tx_hash, {}).get(address, [])
|
||||
for n, v, cb in d:
|
||||
delta += v
|
||||
return delta
|
||||
|
||||
@with_transaction_lock
|
||||
def get_tx_value(self, txid):
|
||||
"""effect of tx on the entire domain"""
|
||||
delta = 0
|
||||
for addr, d in self.txi.get(txid, {}).items():
|
||||
for n, v in d:
|
||||
delta -= v
|
||||
for addr, d in self.txo.get(txid, {}).items():
|
||||
for n, v, cb in d:
|
||||
delta += v
|
||||
return delta
|
||||
|
||||
def get_wallet_delta(self, tx: Transaction):
|
||||
""" effect of tx on wallet """
|
||||
is_relevant = False # "related to wallet?"
|
||||
is_mine = False
|
||||
is_pruned = False
|
||||
is_partial = False
|
||||
v_in = v_out = v_out_mine = 0
|
||||
for txin in tx.inputs():
|
||||
addr = self.get_txin_address(txin)
|
||||
if self.is_mine(addr):
|
||||
is_mine = True
|
||||
is_relevant = True
|
||||
d = self.txo.get(txin['prevout_hash'], {}).get(addr, [])
|
||||
for n, v, cb in d:
|
||||
if n == txin['prevout_n']:
|
||||
value = v
|
||||
break
|
||||
else:
|
||||
value = None
|
||||
if value is None:
|
||||
is_pruned = True
|
||||
else:
|
||||
v_in += value
|
||||
else:
|
||||
is_partial = True
|
||||
if not is_mine:
|
||||
is_partial = False
|
||||
for o in tx.outputs():
|
||||
v_out += o.value
|
||||
if self.is_mine(o.address):
|
||||
v_out_mine += o.value
|
||||
is_relevant = True
|
||||
if is_pruned:
|
||||
# some inputs are mine:
|
||||
fee = None
|
||||
if is_mine:
|
||||
v = v_out_mine - v_out
|
||||
else:
|
||||
# no input is mine
|
||||
v = v_out_mine
|
||||
else:
|
||||
v = v_out_mine - v_in
|
||||
if is_partial:
|
||||
# some inputs are mine, but not all
|
||||
fee = None
|
||||
else:
|
||||
# all inputs are mine
|
||||
fee = v_in - v_out
|
||||
if not is_mine:
|
||||
fee = None
|
||||
return is_relevant, is_mine, v, fee
|
||||
|
||||
def get_tx_fee(self, tx: Transaction) -> Optional[int]:
|
||||
if not tx:
|
||||
return None
|
||||
if hasattr(tx, '_cached_fee'):
|
||||
return tx._cached_fee
|
||||
with self.lock, self.transaction_lock:
|
||||
is_relevant, is_mine, v, fee = self.get_wallet_delta(tx)
|
||||
if fee is None:
|
||||
txid = tx.txid()
|
||||
fee = self.tx_fees.get(txid)
|
||||
# only cache non-None, as None can still change while syncing
|
||||
if fee is not None:
|
||||
tx._cached_fee = fee
|
||||
return fee
|
||||
|
||||
def get_addr_io(self, address):
|
||||
with self.lock, self.transaction_lock:
|
||||
h = self.get_address_history(address)
|
||||
received = {}
|
||||
sent = {}
|
||||
for tx_hash, height in h:
|
||||
l = self.txo.get(tx_hash, {}).get(address, [])
|
||||
for n, v, is_cb in l:
|
||||
received[tx_hash + ':%d'%n] = (height, v, is_cb)
|
||||
for tx_hash, height in h:
|
||||
l = self.txi.get(tx_hash, {}).get(address, [])
|
||||
for txi, v in l:
|
||||
sent[txi] = height
|
||||
return received, sent
|
||||
|
||||
def get_addr_utxo(self, address):
|
||||
coins, spent = self.get_addr_io(address)
|
||||
for txi in spent:
|
||||
coins.pop(txi)
|
||||
out = {}
|
||||
for txo, v in coins.items():
|
||||
tx_height, value, is_cb = v
|
||||
prevout_hash, prevout_n = txo.split(':')
|
||||
x = {
|
||||
'address':address,
|
||||
'value':value,
|
||||
'prevout_n':int(prevout_n),
|
||||
'prevout_hash':prevout_hash,
|
||||
'height':tx_height,
|
||||
'coinbase':is_cb
|
||||
}
|
||||
out[txo] = x
|
||||
return out
|
||||
|
||||
# return the total amount ever received by an address
|
||||
def get_addr_received(self, address):
|
||||
received, sent = self.get_addr_io(address)
|
||||
return sum([v for height, v, is_cb in received.values()])
|
||||
|
||||
@with_local_height_cached
|
||||
def get_addr_balance(self, address):
|
||||
"""Return the balance of a FLO address:
|
||||
confirmed and matured, unconfirmed, unmatured
|
||||
"""
|
||||
received, sent = self.get_addr_io(address)
|
||||
c = u = x = 0
|
||||
local_height = self.get_local_height()
|
||||
for txo, (tx_height, v, is_cb) in received.items():
|
||||
if is_cb and tx_height + COINBASE_MATURITY > local_height:
|
||||
x += v
|
||||
elif tx_height > 0:
|
||||
c += v
|
||||
else:
|
||||
u += v
|
||||
if txo in sent:
|
||||
if sent[txo] > 0:
|
||||
c -= v
|
||||
else:
|
||||
u -= v
|
||||
return c, u, x
|
||||
|
||||
@with_local_height_cached
|
||||
def get_utxos(self, domain=None, excluded=None, mature=False, confirmed_only=False, nonlocal_only=False):
|
||||
coins = []
|
||||
if domain is None:
|
||||
domain = self.get_addresses()
|
||||
domain = set(domain)
|
||||
if excluded:
|
||||
domain = set(domain) - excluded
|
||||
for addr in domain:
|
||||
utxos = self.get_addr_utxo(addr)
|
||||
for x in utxos.values():
|
||||
if confirmed_only and x['height'] <= 0:
|
||||
continue
|
||||
if nonlocal_only and x['height'] == TX_HEIGHT_LOCAL:
|
||||
continue
|
||||
if mature and x['coinbase'] and x['height'] + COINBASE_MATURITY > self.get_local_height():
|
||||
continue
|
||||
coins.append(x)
|
||||
continue
|
||||
return coins
|
||||
|
||||
def get_balance(self, domain=None):
|
||||
if domain is None:
|
||||
domain = self.get_addresses()
|
||||
domain = set(domain)
|
||||
cc = uu = xx = 0
|
||||
for addr in domain:
|
||||
c, u, x = self.get_addr_balance(addr)
|
||||
cc += c
|
||||
uu += u
|
||||
xx += x
|
||||
return cc, uu, xx
|
||||
|
||||
def is_used(self, address):
|
||||
h = self.history.get(address,[])
|
||||
return len(h) != 0
|
||||
|
||||
def is_empty(self, address):
|
||||
c, u, x = self.get_addr_balance(address)
|
||||
return c+u+x == 0
|
||||
|
||||
def synchronize(self):
|
||||
pass
|
||||
@ -1,134 +0,0 @@
|
||||
# Electrum - lightweight Bitcoin client
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
import asyncio
|
||||
import json
|
||||
import locale
|
||||
import traceback
|
||||
import subprocess
|
||||
import sys
|
||||
import os
|
||||
|
||||
from .version import ELECTRUM_VERSION
|
||||
from . import constants
|
||||
from .i18n import _
|
||||
from .util import make_aiohttp_session
|
||||
|
||||
|
||||
class BaseCrashReporter:
|
||||
report_server = "https://crashhub.electrum.org"
|
||||
config_key = "show_crash_reporter"
|
||||
issue_template = """<h2>Traceback</h2>
|
||||
<pre>
|
||||
{traceback}
|
||||
</pre>
|
||||
|
||||
<h2>Additional information</h2>
|
||||
<ul>
|
||||
<li>Electrum version: {app_version}</li>
|
||||
<li>Python version: {python_version}</li>
|
||||
<li>Operating system: {os}</li>
|
||||
<li>Wallet type: {wallet_type}</li>
|
||||
<li>Locale: {locale}</li>
|
||||
</ul>
|
||||
"""
|
||||
CRASH_MESSAGE = _('Something went wrong while executing Electrum.')
|
||||
CRASH_TITLE = _('Sorry!')
|
||||
REQUEST_HELP_MESSAGE = _('To help us diagnose and fix the problem, you can send us a bug report that contains '
|
||||
'useful debug information:')
|
||||
DESCRIBE_ERROR_MESSAGE = _("Please briefly describe what led to the error (optional):")
|
||||
ASK_CONFIRM_SEND = _("Do you want to send this report?")
|
||||
|
||||
def __init__(self, exctype, value, tb):
|
||||
self.exc_args = (exctype, value, tb)
|
||||
|
||||
def send_report(self, asyncio_loop, proxy, endpoint="/crash"):
|
||||
if constants.net.GENESIS[-4:] not in ["4943", "e26f"] and ".electrum.org" in BaseCrashReporter.report_server:
|
||||
# Gah! Some kind of altcoin wants to send us crash reports.
|
||||
raise Exception(_("Missing report URL."))
|
||||
report = self.get_traceback_info()
|
||||
report.update(self.get_additional_info())
|
||||
report = json.dumps(report)
|
||||
coro = self.do_post(proxy, BaseCrashReporter.report_server + endpoint, data=report)
|
||||
response = asyncio.run_coroutine_threadsafe(coro, asyncio_loop).result(5)
|
||||
return response
|
||||
|
||||
async def do_post(self, proxy, url, data):
|
||||
async with make_aiohttp_session(proxy) as session:
|
||||
async with session.post(url, data=data) as resp:
|
||||
return await resp.text()
|
||||
|
||||
def get_traceback_info(self):
|
||||
exc_string = str(self.exc_args[1])
|
||||
stack = traceback.extract_tb(self.exc_args[2])
|
||||
readable_trace = "".join(traceback.format_list(stack))
|
||||
id = {
|
||||
"file": stack[-1].filename,
|
||||
"name": stack[-1].name,
|
||||
"type": self.exc_args[0].__name__
|
||||
}
|
||||
return {
|
||||
"exc_string": exc_string,
|
||||
"stack": readable_trace,
|
||||
"id": id
|
||||
}
|
||||
|
||||
def get_additional_info(self):
|
||||
args = {
|
||||
"app_version": ELECTRUM_VERSION,
|
||||
"python_version": sys.version,
|
||||
"os": self.get_os_version(),
|
||||
"wallet_type": "unknown",
|
||||
"locale": locale.getdefaultlocale()[0] or "?",
|
||||
"description": self.get_user_description()
|
||||
}
|
||||
try:
|
||||
args["wallet_type"] = self.get_wallet_type()
|
||||
except:
|
||||
# Maybe the wallet isn't loaded yet
|
||||
pass
|
||||
try:
|
||||
args["app_version"] = self.get_git_version()
|
||||
except:
|
||||
# This is probably not running from source
|
||||
pass
|
||||
return args
|
||||
|
||||
@staticmethod
|
||||
def get_git_version():
|
||||
dir = os.path.dirname(os.path.realpath(sys.argv[0]))
|
||||
version = subprocess.check_output(
|
||||
['git', 'describe', '--always', '--dirty'], cwd=dir)
|
||||
return str(version, "utf8").strip()
|
||||
|
||||
def get_report_string(self):
|
||||
info = self.get_additional_info()
|
||||
info["traceback"] = "".join(traceback.format_exception(*self.exc_args))
|
||||
return self.issue_template.format(**info)
|
||||
|
||||
def get_user_description(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_wallet_type(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_os_version(self):
|
||||
raise NotImplementedError
|
||||
@ -1,620 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2016 Thomas Voegtlin
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
from functools import partial
|
||||
from typing import List, TYPE_CHECKING, Tuple, NamedTuple, Any
|
||||
|
||||
from . import bitcoin
|
||||
from . import keystore
|
||||
from .bip32 import is_bip32_derivation, xpub_type
|
||||
from .keystore import bip44_derivation, purpose48_derivation
|
||||
from .wallet import (Imported_Wallet, Standard_Wallet, Multisig_Wallet,
|
||||
wallet_types, Wallet, Abstract_Wallet)
|
||||
from .storage import (WalletStorage, STO_EV_USER_PW, STO_EV_XPUB_PW,
|
||||
get_derivation_used_for_hw_device_encryption)
|
||||
from .i18n import _
|
||||
from .util import UserCancelled, InvalidPassword, WalletFileException
|
||||
from .simple_config import SimpleConfig
|
||||
from .plugin import Plugins
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .plugin import DeviceInfo
|
||||
|
||||
|
||||
# hardware device setup purpose
|
||||
HWD_SETUP_NEW_WALLET, HWD_SETUP_DECRYPT_WALLET = range(0, 2)
|
||||
|
||||
|
||||
class ScriptTypeNotSupported(Exception): pass
|
||||
|
||||
|
||||
class GoBack(Exception): pass
|
||||
|
||||
|
||||
class WizardStackItem(NamedTuple):
|
||||
action: Any
|
||||
args: Any
|
||||
storage_data: dict
|
||||
|
||||
|
||||
class BaseWizard(object):
|
||||
|
||||
def __init__(self, config: SimpleConfig, plugins: Plugins, storage: WalletStorage):
|
||||
super(BaseWizard, self).__init__()
|
||||
self.config = config
|
||||
self.plugins = plugins
|
||||
self.storage = storage
|
||||
self.wallet = None # type: Abstract_Wallet
|
||||
self._stack = [] # type: List[WizardStackItem]
|
||||
self.plugin = None
|
||||
self.keystores = []
|
||||
self.is_kivy = config.get('gui') == 'kivy'
|
||||
self.seed_type = None
|
||||
|
||||
def set_icon(self, icon):
|
||||
pass
|
||||
|
||||
def run(self, *args):
|
||||
action = args[0]
|
||||
args = args[1:]
|
||||
storage_data = self.storage.get_all_data()
|
||||
self._stack.append(WizardStackItem(action, args, storage_data))
|
||||
if not action:
|
||||
return
|
||||
if type(action) is tuple:
|
||||
self.plugin, action = action
|
||||
if self.plugin and hasattr(self.plugin, action):
|
||||
f = getattr(self.plugin, action)
|
||||
f(self, *args)
|
||||
elif hasattr(self, action):
|
||||
f = getattr(self, action)
|
||||
f(*args)
|
||||
else:
|
||||
raise Exception("unknown action", action)
|
||||
|
||||
def can_go_back(self):
|
||||
return len(self._stack) > 1
|
||||
|
||||
def go_back(self):
|
||||
if not self.can_go_back():
|
||||
return
|
||||
# pop 'current' frame
|
||||
self._stack.pop()
|
||||
# pop 'previous' frame
|
||||
stack_item = self._stack.pop()
|
||||
# try to undo side effects since we last entered 'previous' frame
|
||||
# FIXME only self.storage is properly restored
|
||||
self.storage.overwrite_all_data(stack_item.storage_data)
|
||||
# rerun 'previous' frame
|
||||
self.run(stack_item.action, *stack_item.args)
|
||||
|
||||
def reset_stack(self):
|
||||
self._stack = []
|
||||
|
||||
def new(self):
|
||||
name = os.path.basename(self.storage.path)
|
||||
title = _("Create") + ' ' + name
|
||||
message = '\n'.join([
|
||||
_("What kind of wallet do you want to create?")
|
||||
])
|
||||
wallet_kinds = [
|
||||
('standard', _("Standard wallet")),
|
||||
('multisig', _("Multi-signature wallet")),
|
||||
('imported', _("Import FLO addresses or private keys")),
|
||||
]
|
||||
choices = [pair for pair in wallet_kinds if pair[0] in wallet_types]
|
||||
self.choice_dialog(title=title, message=message, choices=choices, run_next=self.on_wallet_type)
|
||||
|
||||
def upgrade_storage(self):
|
||||
exc = None
|
||||
def on_finished():
|
||||
if exc is None:
|
||||
self.wallet = Wallet(self.storage)
|
||||
self.terminate()
|
||||
else:
|
||||
raise exc
|
||||
def do_upgrade():
|
||||
nonlocal exc
|
||||
try:
|
||||
self.storage.upgrade()
|
||||
except Exception as e:
|
||||
exc = e
|
||||
self.waiting_dialog(do_upgrade, _('Upgrading wallet format...'), on_finished=on_finished)
|
||||
|
||||
def load_2fa(self):
|
||||
self.storage.put('wallet_type', '2fa')
|
||||
self.storage.put('use_trustedcoin', True)
|
||||
self.plugin = self.plugins.load_plugin('trustedcoin')
|
||||
|
||||
def on_wallet_type(self, choice):
|
||||
self.wallet_type = choice
|
||||
if choice == 'standard':
|
||||
action = 'choose_keystore'
|
||||
elif choice == 'multisig':
|
||||
action = 'choose_multisig'
|
||||
elif choice == '2fa':
|
||||
self.load_2fa()
|
||||
action = self.storage.get_action()
|
||||
elif choice == 'imported':
|
||||
action = 'import_addresses_or_keys'
|
||||
self.run(action)
|
||||
|
||||
def choose_multisig(self):
|
||||
def on_multisig(m, n):
|
||||
multisig_type = "%dof%d" % (m, n)
|
||||
self.storage.put('wallet_type', multisig_type)
|
||||
self.n = n
|
||||
self.run('choose_keystore')
|
||||
self.multisig_dialog(run_next=on_multisig)
|
||||
|
||||
def choose_keystore(self):
|
||||
assert self.wallet_type in ['standard', 'multisig']
|
||||
i = len(self.keystores)
|
||||
title = _('Add cosigner') + ' (%d of %d)'%(i+1, self.n) if self.wallet_type=='multisig' else _('Keystore')
|
||||
if self.wallet_type =='standard' or i==0:
|
||||
message = _('Do you want to create a new seed, or to restore a wallet using an existing seed?')
|
||||
choices = [
|
||||
('choose_seed_type', _('Create a new seed')),
|
||||
('restore_from_seed', _('I already have a seed')),
|
||||
('restore_from_key', _('Use a master key')),
|
||||
]
|
||||
if not self.is_kivy:
|
||||
choices.append(('choose_hw_device', _('Use a hardware device')))
|
||||
else:
|
||||
message = _('Add a cosigner to your multi-sig wallet')
|
||||
choices = [
|
||||
('restore_from_key', _('Enter cosigner key')),
|
||||
('restore_from_seed', _('Enter cosigner seed')),
|
||||
]
|
||||
if not self.is_kivy:
|
||||
choices.append(('choose_hw_device', _('Cosign with hardware device')))
|
||||
|
||||
self.choice_dialog(title=title, message=message, choices=choices, run_next=self.run)
|
||||
|
||||
def import_addresses_or_keys(self):
|
||||
v = lambda x: keystore.is_address_list(x) or keystore.is_private_key_list(x)
|
||||
title = _("Import FLO Addresses")
|
||||
message = _("Enter a list of FLO addresses (this will create a watching-only wallet), or a list of private keys.")
|
||||
self.add_xpub_dialog(title=title, message=message, run_next=self.on_import,
|
||||
is_valid=v, allow_multi=True, show_wif_help=True)
|
||||
|
||||
def on_import(self, text):
|
||||
# create a temporary wallet and exploit that modifications
|
||||
# will be reflected on self.storage
|
||||
if keystore.is_address_list(text):
|
||||
w = Imported_Wallet(self.storage)
|
||||
addresses = text.split()
|
||||
good_inputs, bad_inputs = w.import_addresses(addresses, write_to_disk=False)
|
||||
elif keystore.is_private_key_list(text):
|
||||
k = keystore.Imported_KeyStore({})
|
||||
self.storage.put('keystore', k.dump())
|
||||
w = Imported_Wallet(self.storage)
|
||||
keys = keystore.get_private_keys(text)
|
||||
good_inputs, bad_inputs = w.import_private_keys(keys, None, write_to_disk=False)
|
||||
self.keystores.append(w.keystore)
|
||||
else:
|
||||
return self.terminate()
|
||||
if bad_inputs:
|
||||
msg = "\n".join(f"{key[:10]}... ({msg})" for key, msg in bad_inputs[:10])
|
||||
if len(bad_inputs) > 10: msg += '\n...'
|
||||
self.show_error(_("The following inputs could not be imported")
|
||||
+ f' ({len(bad_inputs)}):\n' + msg)
|
||||
# FIXME what if len(good_inputs) == 0 ?
|
||||
return self.run('create_wallet')
|
||||
|
||||
def restore_from_key(self):
|
||||
if self.wallet_type == 'standard':
|
||||
v = keystore.is_master_key
|
||||
title = _("Create keystore from a master key")
|
||||
message = ' '.join([
|
||||
_("To create a watching-only wallet, please enter your master public key (xpub/ypub/zpub)."),
|
||||
_("To create a spending wallet, please enter a master private key (xprv/yprv/zprv).")
|
||||
])
|
||||
self.add_xpub_dialog(title=title, message=message, run_next=self.on_restore_from_key, is_valid=v)
|
||||
else:
|
||||
i = len(self.keystores) + 1
|
||||
self.add_cosigner_dialog(index=i, run_next=self.on_restore_from_key, is_valid=keystore.is_bip32_key)
|
||||
|
||||
def on_restore_from_key(self, text):
|
||||
k = keystore.from_master_key(text)
|
||||
self.on_keystore(k)
|
||||
|
||||
def choose_hw_device(self, purpose=HWD_SETUP_NEW_WALLET):
    """Wizard step: scan for hardware wallets and let the user pick one.

    Collects per-plugin scan errors into `debug_msg` so they can be shown
    when no device is found. On selection, continues via `on_device`.
    """
    title = _('Hardware Keystore')
    # check available plugins
    supported_plugins = self.plugins.get_hardware_support()
    devices = []  # type: List[Tuple[str, DeviceInfo]]
    devmgr = self.plugins.device_manager
    debug_msg = ''

    def failed_getting_device_infos(name, e):
        # record a per-plugin failure; shown later in the "no devices" dialog
        nonlocal debug_msg
        devmgr.print_error(f'error getting device infos for {name}: {e}')
        indented_error_msg = ' '.join([''] + str(e).splitlines(keepends=True))
        debug_msg += f' {name}: (error getting device infos)\n{indented_error_msg}\n'

    # scan devices
    try:
        scanned_devices = devmgr.scan_devices()
    except BaseException as e:
        devmgr.print_error('error scanning devices: {}'.format(repr(e)))
        debug_msg = ' {}:\n {}'.format(_('Error scanning devices'), e)
    else:
        for splugin in supported_plugins:
            name, plugin = splugin.name, splugin.plugin
            # plugin init errored?
            if not plugin:
                e = splugin.exception
                indented_error_msg = ' '.join([''] + str(e).splitlines(keepends=True))
                debug_msg += f' {name}: (error during plugin init)\n'
                debug_msg += ' {}\n'.format(_('You might have an incompatible library.'))
                debug_msg += f'{indented_error_msg}\n'
                continue
            # see if plugin recognizes 'scanned_devices'
            try:
                # FIXME: side-effect: unpaired_device_info sets client.handler
                device_infos = devmgr.unpaired_device_infos(None, plugin, devices=scanned_devices,
                                                            include_failing_clients=True)
            except BaseException as e:
                traceback.print_exc()
                failed_getting_device_infos(name, e)
                continue
            # split into failing vs working clients; only working ones are offered
            device_infos_failing = list(filter(lambda di: di.exception is not None, device_infos))
            for di in device_infos_failing:
                failed_getting_device_infos(name, di.exception)
            device_infos_working = list(filter(lambda di: di.exception is None, device_infos))
            devices += list(map(lambda x: (name, x), device_infos_working))
    if not debug_msg:
        debug_msg = ' {}'.format(_('No exceptions encountered.'))
    if not devices:
        # no device: show troubleshooting info and offer a rescan (re-entrant call)
        msg = ''.join([
            _('No hardware device detected.') + '\n',
            _('To trigger a rescan, press \'Next\'.') + '\n\n',
            _('If your device is not detected on Windows, go to "Settings", "Devices", "Connected devices", and do "Remove device". Then, plug your device again.') + ' ',
            _('On Linux, you might have to add a new permission to your udev rules.') + '\n\n',
            _('Debug message') + '\n',
            debug_msg
        ])
        self.confirm_dialog(title=title, message=msg, run_next= lambda x: self.choose_hw_device(purpose))
        return
    # select device
    self.devices = devices
    choices = []
    for name, info in devices:
        state = _("initialized") if info.initialized else _("wiped")
        label = info.label or _("An unnamed {}").format(name)
        try: transport_str = info.device.transport_ui_string[:20]
        except: transport_str = 'unknown transport'
        descr = f"{label} [{name}, {state}, {transport_str}]"
        choices.append(((name, info), descr))
    msg = _('Select a device') + ':'
    self.choice_dialog(title=title, message=msg, choices=choices, run_next= lambda *args: self.on_device(*args, purpose=purpose))
|
||||
|
||||
def on_device(self, name, device_info, *, purpose):
    """Wizard step: set up the selected hardware device for `purpose`.

    On connection errors the device is un-paired and the user is sent back
    to device selection. Raises on unknown `purpose`.
    """
    self.plugin = self.plugins.get_plugin(name)
    try:
        self.plugin.setup_device(device_info, self, purpose)
    except OSError as e:
        # connection-level failure: re-pair and retry from device selection
        self.show_error(_('We encountered an error while connecting to your device:')
                        + '\n' + str(e) + '\n'
                        + _('To try to fix this, we will now re-pair with your device.') + '\n'
                        + _('Please try again.'))
        devmgr = self.plugins.device_manager
        devmgr.unpair_id(device_info.device.id_)
        self.choose_hw_device(purpose)
        return
    except (UserCancelled, GoBack):
        self.choose_hw_device(purpose)
        return
    except BaseException as e:
        traceback.print_exc(file=sys.stderr)
        self.show_error(str(e))
        self.choose_hw_device(purpose)
        return
    if purpose == HWD_SETUP_NEW_WALLET:
        def f(derivation, script_type):
            self.run('on_hw_derivation', name, device_info, derivation, script_type)
        self.derivation_and_script_type_dialog(f)
    elif purpose == HWD_SETUP_DECRYPT_WALLET:
        # derive the storage-encryption password from a fixed derivation path
        derivation = get_derivation_used_for_hw_device_encryption()
        xpub = self.plugin.get_xpub(device_info.device.id_, derivation, 'standard', self)
        password = keystore.Xpub.get_pubkey_from_xpub(xpub, ())
        try:
            self.storage.decrypt(password)
        except InvalidPassword:
            # try to clear session so that user can type another passphrase
            devmgr = self.plugins.device_manager
            client = devmgr.client_by_id(device_info.device.id_)
            if hasattr(client, 'clear_session'):  # FIXME not all hw wallet plugins have this
                client.clear_session()
            raise
    else:
        raise Exception('unknown purpose: %s' % purpose)
|
||||
|
||||
def derivation_and_script_type_dialog(self, f):
    """Ask the user for a script type and derivation path; call `f(derivation, script_type)`.

    Loops until the chosen script type is supported (ScriptTypeNotSupported
    re-displays the dialog).
    """
    message1 = _('Choose the type of addresses in your wallet.')
    message2 = '\n'.join([
        _('You can override the suggested derivation path.'),
        _('If you are not sure what this is, leave this field unchanged.')
    ])
    if self.wallet_type == 'multisig':
        # There is no general standard for HD multisig.
        # For legacy, this is partially compatible with BIP45; assumes index=0
        # For segwit, a custom path is used, as there is no standard at all.
        default_choice_idx = 2
        choices = [
            ('standard', 'legacy multisig (p2sh)', "m/45'/0"),
            ('p2wsh-p2sh', 'p2sh-segwit multisig (p2wsh-p2sh)', purpose48_derivation(0, xtype='p2wsh-p2sh')),
            ('p2wsh', 'native segwit multisig (p2wsh)', purpose48_derivation(0, xtype='p2wsh')),
        ]
    else:
        # single-sig: BIP44/49/84 per script type
        default_choice_idx = 2
        choices = [
            ('standard', 'legacy (p2pkh)', bip44_derivation(0, bip43_purpose=44)),
            ('p2wpkh-p2sh', 'p2sh-segwit (p2wpkh-p2sh)', bip44_derivation(0, bip43_purpose=49)),
            ('p2wpkh', 'native segwit (p2wpkh)', bip44_derivation(0, bip43_purpose=84)),
        ]
    while True:
        try:
            self.choice_and_line_dialog(
                run_next=f, title=_('Script type and Derivation path'), message1=message1,
                message2=message2, choices=choices, test_text=is_bip32_derivation,
                default_choice_idx=default_choice_idx)
            return
        except ScriptTypeNotSupported as e:
            self.show_error(e)
            # let the user choose again
|
||||
|
||||
def on_hw_derivation(self, name, device_info, derivation, xtype):
    """Fetch the xpub at `derivation` from the hardware device and build a keystore."""
    from .keystore import hardware_keystore
    try:
        xpub = self.plugin.get_xpub(device_info.device.id_, derivation, xtype, self)
    except ScriptTypeNotSupported:
        raise  # this is handled in derivation_dialog
    except BaseException as e:
        traceback.print_exc(file=sys.stderr)
        self.show_error(e)
        return
    # serialized form consumed by hardware_keystore()
    d = {
        'type': 'hardware',
        'hw_type': name,
        'derivation': derivation,
        'xpub': xpub,
        'label': device_info.label,
    }
    k = hardware_keystore(d)
    self.on_keystore(k)
|
||||
|
||||
def passphrase_dialog(self, run_next, is_restoring=False):
    """Ask for an optional seed extension (BIP39 "passphrase"); any text is accepted."""
    title = _('Seed extension')
    message = '\n'.join([
        _('You may extend your seed with custom words.'),
        _('Your seed extension must be saved together with your seed.'),
    ])
    warning = '\n'.join([
        _('Note that this is NOT your encryption password.'),
        _('If you do not know what this is, leave this field empty.'),
    ])
    # extra warning only when restoring a bip39 seed (see issue 4566)
    warn_issue4566 = is_restoring and self.seed_type == 'bip39'
    self.line_dialog(title=title, message=message, warning=warning,
                     default='', test=lambda x:True, run_next=run_next,
                     warn_issue4566=warn_issue4566)
|
||||
|
||||
def restore_from_seed(self):
    """Wizard step: restore a wallet from a seed phrase."""
    self.opt_bip39 = True
    self.opt_ext = True
    # multisig cosigner seeds must be electrum 'standard'/'segwit' seeds
    is_cosigning_seed = lambda x: bitcoin.seed_type(x) in ['standard', 'segwit']
    test = bitcoin.is_seed if self.wallet_type == 'standard' else is_cosigning_seed
    self.restore_seed_dialog(run_next=self.on_restore_seed, test=test)
|
||||
|
||||
def on_restore_seed(self, seed, is_bip39, is_ext):
    """Dispatch on the detected seed type; optionally collect a seed extension first."""
    self.seed_type = 'bip39' if is_bip39 else bitcoin.seed_type(seed)
    if self.seed_type == 'bip39':
        f = lambda passphrase: self.on_restore_bip39(seed, passphrase)
        self.passphrase_dialog(run_next=f, is_restoring=True) if is_ext else f('')
    elif self.seed_type in ['standard', 'segwit']:
        f = lambda passphrase: self.run('create_keystore', seed, passphrase)
        self.passphrase_dialog(run_next=f, is_restoring=True) if is_ext else f('')
    elif self.seed_type == 'old':
        # old-style electrum seeds have no extension
        self.run('create_keystore', seed, '')
    elif bitcoin.is_any_2fa_seed_type(self.seed_type):
        # 2fa seeds need the trustedcoin plugin; re-enter this step after loading it
        self.load_2fa()
        self.run('on_restore_seed', seed, is_ext)
    else:
        raise Exception('Unknown seed type', self.seed_type)
|
||||
|
||||
def on_restore_bip39(self, seed, passphrase):
    """Ask for script type / derivation path, then derive from the BIP39 seed."""
    def on_choice(derivation, script_type):
        self.run('on_bip43', seed, passphrase, derivation, script_type)
    self.derivation_and_script_type_dialog(on_choice)
|
||||
|
||||
def create_keystore(self, seed, passphrase):
    """Derive a software keystore from seed + passphrase and hand it on."""
    is_multisig = (self.wallet_type == 'multisig')
    self.on_keystore(keystore.from_seed(seed, passphrase, is_multisig))
|
||||
|
||||
def on_bip43(self, seed, passphrase, derivation, script_type):
    """Derive a keystore from a BIP39 seed at the chosen path/script type."""
    new_keystore = keystore.from_bip39_seed(seed, passphrase, derivation, xtype=script_type)
    self.on_keystore(new_keystore)
|
||||
|
||||
def on_keystore(self, k):
    """Validate keystore `k` against the wallet type and accumulated cosigners.

    Rejects mismatched xpub script types and duplicate cosigner keys; on
    rejection the user is sent back to keystore selection.
    """
    has_xpub = isinstance(k, keystore.Xpub)
    if has_xpub:
        t1 = xpub_type(k.xpub)
    if self.wallet_type == 'standard':
        if has_xpub and t1 not in ['standard', 'p2wpkh', 'p2wpkh-p2sh']:
            self.show_error(_('Wrong key type') + ' %s'%t1)
            self.run('choose_keystore')
            return
        self.keystores.append(k)
        self.run('create_wallet')
    elif self.wallet_type == 'multisig':
        # multisig keystores are always xpub-based, so t1 is bound here
        assert has_xpub
        if t1 not in ['standard', 'p2wsh', 'p2wsh-p2sh']:
            self.show_error(_('Wrong key type') + ' %s'%t1)
            self.run('choose_keystore')
            return
        if k.xpub in map(lambda x: x.xpub, self.keystores):
            self.show_error(_('Error: duplicate master public key'))
            self.run('choose_keystore')
            return
        if len(self.keystores)>0:
            # all cosigners must share the same script type
            t2 = xpub_type(self.keystores[0].xpub)
            if t1 != t2:
                self.show_error(_('Cannot add this cosigner:') + '\n' + "Their key type is '%s', we are '%s'"%(t1, t2))
                self.run('choose_keystore')
                return
        self.keystores.append(k)
        if len(self.keystores) == 1:
            # first cosigner: show our xpub, then collect the rest
            xpub = k.get_master_public_key()
            self.reset_stack()
            self.run('show_xpub_and_add_cosigners', xpub)
        elif len(self.keystores) < self.n:
            self.run('choose_keystore')
        else:
            self.run('create_wallet')
|
||||
|
||||
def create_wallet(self):
    """Choose how to encrypt the new wallet and proceed to `on_password`.

    Hardware single-sig wallets may be encrypted with a key derived from
    the device; otherwise the user picks a password.
    """
    encrypt_keystore = any(k.may_have_password() for k in self.keystores)
    # note: the following condition ("if") is duplicated logic from
    # wallet.get_available_storage_encryption_version()
    if self.wallet_type == 'standard' and isinstance(self.keystores[0], keystore.Hardware_KeyStore):
        # offer encrypting with a pw derived from the hw device
        k = self.keystores[0]
        try:
            k.handler = self.plugin.create_handler(self)
            password = k.get_password_for_storage_encryption()
        except UserCancelled:
            devmgr = self.plugins.device_manager
            devmgr.unpair_xpub(k.xpub)
            self.choose_hw_device()
            return
        except BaseException as e:
            traceback.print_exc(file=sys.stderr)
            self.show_error(str(e))
            return
        self.request_storage_encryption(
            run_next=lambda encrypt_storage: self.on_password(
                password,
                encrypt_storage=encrypt_storage,
                storage_enc_version=STO_EV_XPUB_PW,
                encrypt_keystore=False))
    else:
        # prompt the user to set an arbitrary password
        self.request_password(
            run_next=lambda password, encrypt_storage: self.on_password(
                password,
                encrypt_storage=encrypt_storage,
                storage_enc_version=STO_EV_USER_PW,
                encrypt_keystore=encrypt_keystore),
            force_disable_encrypt_cb=not encrypt_keystore)
|
||||
|
||||
def on_password(self, password, *, encrypt_storage,
                storage_enc_version=STO_EV_USER_PW, encrypt_keystore):
    """Apply the chosen password to storage/keystores and instantiate the wallet.

    Must run before the wallet file exists, so plaintext keys never hit disk.
    """
    assert not self.storage.file_exists(), "file was created too soon! plaintext keys might have been written to disk"
    self.storage.set_keystore_encryption(bool(password) and encrypt_keystore)
    if encrypt_storage:
        self.storage.set_password(password, enc_version=storage_enc_version)
    for k in self.keystores:
        if k.may_have_password():
            k.update_password(None, password)
    if self.wallet_type == 'standard':
        self.storage.put('seed_type', self.seed_type)
        keys = self.keystores[0].dump()
        self.storage.put('keystore', keys)
        self.wallet = Standard_Wallet(self.storage)
        self.run('create_addresses')
    elif self.wallet_type == 'multisig':
        # cosigner keystores are stored under x1/, x2/, ...
        for i, k in enumerate(self.keystores):
            self.storage.put('x%d/'%(i+1), k.dump())
        self.storage.write()
        self.wallet = Multisig_Wallet(self.storage)
        self.run('create_addresses')
    elif self.wallet_type == 'imported':
        if len(self.keystores) > 0:
            keys = self.keystores[0].dump()
            self.storage.put('keystore', keys)
        self.wallet = Imported_Wallet(self.storage)
        self.wallet.storage.write()
        # imported wallets have no addresses to derive; wizard ends here
        self.terminate()
|
||||
|
||||
def show_xpub_and_add_cosigners(self, xpub):
    """Display our master public key, then return to cosigner/keystore selection."""
    def proceed(_unused):
        self.run('choose_keystore')
    self.show_xpub_dialog(xpub=xpub, run_next=proceed)
|
||||
|
||||
def choose_seed_type(self, message=None, choices=None):
    """Wizard step: pick the seed type for a new wallet; runs the chosen step name."""
    title = _('Choose Seed type')
    if message is None:
        message = ' '.join([
            _("The type of addresses used by your wallet will depend on your seed.")
        ])
    if choices is None:
        # (wizard step name, display label)
        choices = [
            ('create_standard_seed', _('Legacy')),
        ]
    self.choice_dialog(title=title, message=message, choices=choices, run_next=self.run)
|
||||
|
||||
def create_segwit_seed(self):
    # named wizard step: thin dispatcher to create_seed
    self.create_seed('segwit')
|
||||
def create_standard_seed(self):
    # named wizard step: thin dispatcher to create_seed
    self.create_seed('standard')
|
||||
|
||||
def create_seed(self, seed_type):
    """Generate a new seed of `seed_type`, show it, then ask about a seed extension."""
    from . import mnemonic
    self.seed_type = seed_type
    seed = mnemonic.Mnemonic('en').make_seed(self.seed_type)
    # freshly generated seeds are always electrum-style, never bip39
    self.opt_bip39 = False
    f = lambda x: self.request_passphrase(seed, x)
    self.show_seed_dialog(run_next=f, seed_text=seed)
|
||||
|
||||
def request_passphrase(self, seed, opt_passphrase):
    """If the user opted in, collect a seed extension; else confirm the seed directly."""
    if not opt_passphrase:
        self.run('confirm_seed', seed, '')
        return
    self.passphrase_dialog(run_next=lambda passphrase: self.confirm_seed(seed, passphrase))
|
||||
|
||||
def confirm_seed(self, seed, passphrase):
    """Make the user re-type the seed, then confirm the passphrase."""
    # NOTE(review): the typed value x is validated by `test` and then discarded
    f = lambda x: self.confirm_passphrase(seed, passphrase)
    self.confirm_seed_dialog(run_next=f, test=lambda x: x==seed)
|
||||
|
||||
def confirm_passphrase(self, seed, passphrase):
    """If a seed extension was set, make the user re-type it before creating the keystore."""
    f = lambda x: self.run('create_keystore', seed, x)
    if passphrase:
        title = _('Confirm Seed Extension')
        message = '\n'.join([
            _('Your seed extension must be saved together with your seed.'),
            _('Please type it here.'),
        ])
        self.line_dialog(run_next=f, title=title, message=message, default='', test=lambda x: x==passphrase)
    else:
        f('')
|
||||
|
||||
def create_addresses(self):
    """Final wizard step: derive initial addresses in the background, then finish."""
    def task():
        self.wallet.synchronize()
        self.wallet.storage.write()
        self.terminate()
    msg = _("Electrum is generating your addresses, please wait...")
    self.waiting_dialog(task, msg)
|
||||
@ -1,269 +0,0 @@
|
||||
# Copyright (C) 2018 The Electrum developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file LICENCE or http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import hashlib
|
||||
from typing import Iterator, List
|
||||
|
||||
from .util import bfh, bh2u, BitcoinException, print_error
|
||||
from . import constants
|
||||
from . import ecc
|
||||
from .crypto import hash_160, hmac_oneshot
|
||||
from .bitcoin import rev_hex, int_to_hex, EncodeBase58Check, DecodeBase58Check
|
||||
|
||||
|
||||
BIP32_PRIME = 0x80000000
|
||||
|
||||
|
||||
def protect_against_invalid_ecpoint(func):
    """Decorator for CKD functions: on an invalid EC point, retry with the next index.

    Assumes the wrapped function takes the child index as its last argument.
    Raises OverflowError if retrying would cross the hardened/non-hardened boundary.
    """
    def func_wrapper(*args):
        n = args[-1]
        while True:
            is_prime = n & BIP32_PRIME
            try:
                return func(*args[:-1], n=n)
            except ecc.InvalidECPointException:
                print_error('bip32 protect_against_invalid_ecpoint: skipping index')
                n += 1
                is_prime2 = n & BIP32_PRIME
                if is_prime != is_prime2: raise OverflowError()
    return func_wrapper
|
||||
|
||||
|
||||
# Child private key derivation function (from master private key)
|
||||
# k = master private key (32 bytes)
|
||||
# c = master chain code (extra entropy for key derivation) (32 bytes)
|
||||
# n = the index of the key we want to derive. (only 32 bits will be used)
|
||||
# If n is hardened (i.e. the 32nd bit is set), the resulting private key's
|
||||
# corresponding public key can NOT be determined without the master private key.
|
||||
# However, if n is not hardened, the resulting private key's corresponding
|
||||
# public key can be determined without the master private key.
|
||||
@protect_against_invalid_ecpoint
def CKD_priv(k, c, n):
    """Derive the child private key at index `n` from private key `k` and chain code `c`.

    Raises ValueError for negative indices. Hardened vs non-hardened is
    determined by the BIP32_PRIME bit of `n`.
    """
    if n < 0: raise ValueError('the bip32 index needs to be non-negative')
    is_prime = n & BIP32_PRIME
    return _CKD_priv(k, c, bfh(rev_hex(int_to_hex(n,4))), is_prime)
|
||||
|
||||
|
||||
def _CKD_priv(k, c, s, is_prime):
    """Core bip32 private child derivation; `s` is the serialized child index.

    Returns (child_key_bytes, child_chain_code). Raises BitcoinException if
    `k` is not a valid secret, InvalidECPointException if derivation fails
    (caller retries via protect_against_invalid_ecpoint).
    """
    try:
        keypair = ecc.ECPrivkey(k)
    except ecc.InvalidECPointException as e:
        raise BitcoinException('Impossible xprv (not within curve order)') from e
    cK = keypair.get_public_key_bytes(compressed=True)
    # hardened derivation HMACs over the private key, non-hardened over the pubkey
    data = bytes([0]) + k + s if is_prime else cK + s
    I = hmac_oneshot(c, data, hashlib.sha512)
    I_left = ecc.string_to_number(I[0:32])
    k_n = (I_left + ecc.string_to_number(k)) % ecc.CURVE_ORDER
    if I_left >= ecc.CURVE_ORDER or k_n == 0:
        raise ecc.InvalidECPointException()
    k_n = ecc.number_to_string(k_n, ecc.CURVE_ORDER)
    c_n = I[32:]
    return k_n, c_n
|
||||
|
||||
# Child public key derivation function (from public key only)
|
||||
# K = master public key
|
||||
# c = master chain code
|
||||
# n = index of key we want to derive
|
||||
# This function allows us to find the nth public key, as long as n is
|
||||
# not hardened. If n is hardened, we need the master private key to find it.
|
||||
@protect_against_invalid_ecpoint
def CKD_pub(cK, c, n):
    """Derive the child public key at index `n` from pubkey `cK` and chain code `c`.

    Only non-hardened derivation is possible from a public key; raises for
    hardened or negative indices.
    """
    if n < 0: raise ValueError('the bip32 index needs to be non-negative')
    if n & BIP32_PRIME:
        # was a bare `raise Exception()`: add a message so failures are diagnosable
        raise Exception('cannot do hardened (prime) derivation from a public key')
    return _CKD_pub(cK, c, bfh(rev_hex(int_to_hex(n,4))))
|
||||
|
||||
# helper function, callable with arbitrary string.
|
||||
# note: 's' does not need to fit into 32 bits here! (c.f. trustedcoin billing)
|
||||
# helper function, callable with arbitrary string.
# note: 's' does not need to fit into 32 bits here! (c.f. trustedcoin billing)
def _CKD_pub(cK, c, s):
    """Core bip32 public child derivation; returns (child_pubkey, child_chain_code)."""
    I = hmac_oneshot(c, cK + s, hashlib.sha512)
    pubkey = ecc.ECPrivkey(I[0:32]) + ecc.ECPubkey(cK)
    if pubkey.is_at_infinity():
        raise ecc.InvalidECPointException()
    cK_n = pubkey.get_public_key_bytes(compressed=True)
    c_n = I[32:]
    return cK_n, c_n
|
||||
|
||||
|
||||
def xprv_header(xtype, *, net=None):
    """4-byte version header for an xprv of script type `xtype` on `net`."""
    if net is None:
        net = constants.net
    header_int = net.XPRV_HEADERS[xtype]
    return bfh("%08x" % header_int)
|
||||
|
||||
|
||||
def xpub_header(xtype, *, net=None):
    """4-byte version header for an xpub of script type `xtype` on `net`."""
    if net is None:
        net = constants.net
    header_int = net.XPUB_HEADERS[xtype]
    return bfh("%08x" % header_int)
|
||||
|
||||
|
||||
def serialize_xprv(xtype, c, k, depth=0, fingerprint=b'\x00'*4,
                   child_number=b'\x00'*4, *, net=None):
    """Serialize an extended private key to base58 (header|depth|fpr|index|chain|0x00|key)."""
    if not ecc.is_secret_within_curve_range(k):
        raise BitcoinException('Impossible xprv (not within curve order)')
    xprv = xprv_header(xtype, net=net) \
           + bytes([depth]) + fingerprint + child_number + c + bytes([0]) + k
    return EncodeBase58Check(xprv)
|
||||
|
||||
|
||||
def serialize_xpub(xtype, c, cK, depth=0, fingerprint=b'\x00'*4,
                   child_number=b'\x00'*4, *, net=None):
    """Serialize an extended public key to base58 (header|depth|fpr|index|chain|pubkey)."""
    xpub = xpub_header(xtype, net=net) \
           + bytes([depth]) + fingerprint + child_number + c + cK
    return EncodeBase58Check(xpub)
|
||||
|
||||
|
||||
# Raised by deserialize_xkey when the 4 version bytes match no known xpub/xprv header.
class InvalidMasterKeyVersionBytes(BitcoinException): pass
|
||||
|
||||
|
||||
def deserialize_xkey(xkey, prv, *, net=None):
    """Decode a base58 extended key into its bip32 fields.

    Returns (xtype, depth, fingerprint, child_number, chain_code, K_or_k).
    `prv` selects xprv vs xpub header tables. Raises BitcoinException on bad
    length/secret and InvalidMasterKeyVersionBytes on unknown version bytes.
    """
    if net is None:
        net = constants.net
    xkey = DecodeBase58Check(xkey)
    if len(xkey) != 78:
        raise BitcoinException('Invalid length for extended key: {}'
                               .format(len(xkey)))
    depth = xkey[4]
    fingerprint = xkey[5:9]
    child_number = xkey[9:13]
    c = xkey[13:13+32]
    header = int.from_bytes(xkey[0:4], byteorder='big')
    headers = net.XPRV_HEADERS if prv else net.XPUB_HEADERS
    if header not in headers.values():
        raise InvalidMasterKeyVersionBytes('Invalid extended key format: {}'
                                           .format(hex(header)))
    # reverse lookup: version bytes -> script type name
    xtype = list(headers.keys())[list(headers.values()).index(header)]
    # private keys carry a 0x00 pad byte before the 32-byte secret
    n = 33 if prv else 32
    K_or_k = xkey[13+n:]
    if prv and not ecc.is_secret_within_curve_range(K_or_k):
        raise BitcoinException('Impossible xprv (not within curve order)')
    return xtype, depth, fingerprint, child_number, c, K_or_k
|
||||
|
||||
|
||||
def deserialize_xpub(xkey, *, net=None):
    """Deserialize an extended public key; see deserialize_xkey for the return tuple."""
    return deserialize_xkey(xkey, False, net=net)
|
||||
|
||||
def deserialize_xprv(xkey, *, net=None):
    """Deserialize an extended private key; see deserialize_xkey for the return tuple."""
    return deserialize_xkey(xkey, True, net=net)
|
||||
|
||||
def xpub_type(x):
    """Return the script-type name ('standard', 'p2wpkh', ...) encoded in xpub `x`."""
    return deserialize_xpub(x)[0]
|
||||
|
||||
|
||||
def is_xpub(text):
    """Return True iff `text` parses as a valid extended public key."""
    try:
        deserialize_xpub(text)
        return True
    except Exception:
        # narrowed from bare `except:` so SystemExit/KeyboardInterrupt propagate
        return False
|
||||
|
||||
|
||||
def is_xprv(text):
    """Return True iff `text` parses as a valid extended private key."""
    try:
        deserialize_xprv(text)
        return True
    except Exception:
        # narrowed from bare `except:` so SystemExit/KeyboardInterrupt propagate
        return False
|
||||
|
||||
|
||||
def xpub_from_xprv(xprv):
    """Compute the corresponding xpub for `xprv` (same depth/fingerprint/index)."""
    xtype, depth, fingerprint, child_number, c, k = deserialize_xprv(xprv)
    cK = ecc.ECPrivkey(k).get_public_key_bytes(compressed=True)
    return serialize_xpub(xtype, c, cK, depth, fingerprint, child_number)
|
||||
|
||||
|
||||
def bip32_root(seed, xtype):
    """Compute the bip32 master (xprv, xpub) pair from `seed` bytes."""
    I = hmac_oneshot(b"Bitcoin seed", seed, hashlib.sha512)
    master_k = I[0:32]
    master_c = I[32:]
    # create xprv first, as that will check if master_k is within curve order
    xprv = serialize_xprv(xtype, master_c, master_k)
    cK = ecc.ECPrivkey(master_k).get_public_key_bytes(compressed=True)
    xpub = serialize_xpub(xtype, master_c, cK)
    return xprv, xpub
|
||||
|
||||
|
||||
def xpub_from_pubkey(xtype, cK):
    """Wrap a compressed pubkey `cK` in an xpub with an all-zero chain code."""
    # 0x02/0x03 are the valid compressed-pubkey prefixes
    if cK[0] not in (0x02, 0x03):
        raise ValueError('Unexpected first byte: {}'.format(cK[0]))
    return serialize_xpub(xtype, b'\x00'*32, cK)
|
||||
|
||||
|
||||
def bip32_derivation(s: str) -> Iterator[int]:
    """Yield the child indices of a bip32 path string such as "m/44'/0'/0".

    Hardened components (trailing ') have the BIP32_PRIME bit set.
    Raises ValueError if the path does not start with 'm/'.
    (Fixed return annotation: this is a generator, not an int.)
    """
    if not s.startswith('m/'):
        raise ValueError('invalid bip32 derivation path: {}'.format(s))
    s = s[2:]
    for n in s.split('/'):
        if n == '': continue
        i = int(n[:-1]) + BIP32_PRIME if n[-1] == "'" else int(n)
        yield i
|
||||
|
||||
def convert_bip32_path_to_list_of_uint32(n: str) -> List[int]:
    """Convert bip32 path to list of uint32 integers with prime flags
    m/0/-1/1' -> [0, 0x80000001, 0x80000001]

    based on code in trezorlib
    """
    result = []
    for component in n.split('/')[1:]:
        if component == '':
            continue
        flag = 0
        if component.endswith("'"):
            component = component.replace('\'', '')
            flag = BIP32_PRIME
        if component.startswith('-'):
            flag = BIP32_PRIME
        result.append(abs(int(component)) | flag)
    return result
|
||||
|
||||
def is_bip32_derivation(x: str) -> bool:
    """Return True iff `x` is a parseable bip32 derivation path."""
    try:
        # exhaust the generator so parse errors surface
        list(bip32_derivation(x))
        return True
    except Exception:
        # narrowed from bare `except:` so SystemExit/KeyboardInterrupt propagate
        return False
|
||||
|
||||
def bip32_private_derivation(xprv, branch, sequence):
    """Derive (xprv, xpub) at path `sequence`, which must extend `branch`.

    Components are '/'-separated; a trailing ' marks hardened derivation.
    """
    if not sequence.startswith(branch):
        raise ValueError('incompatible branch ({}) and sequence ({})'
                         .format(branch, sequence))
    if branch == sequence:
        return xprv, xpub_from_xprv(xprv)
    xtype, depth, fingerprint, child_number, c, k = deserialize_xprv(xprv)
    sequence = sequence[len(branch):]
    for n in sequence.split('/'):
        if n == '': continue
        i = int(n[:-1]) + BIP32_PRIME if n[-1] == "'" else int(n)
        parent_k = k
        k, c = CKD_priv(k, c, i)
        depth += 1
    # fingerprint/child_number reflect the last derivation step only
    parent_cK = ecc.ECPrivkey(parent_k).get_public_key_bytes(compressed=True)
    fingerprint = hash_160(parent_cK)[0:4]
    child_number = bfh("%08X"%i)
    cK = ecc.ECPrivkey(k).get_public_key_bytes(compressed=True)
    xpub = serialize_xpub(xtype, c, cK, depth, fingerprint, child_number)
    xprv = serialize_xprv(xtype, c, k, depth, fingerprint, child_number)
    return xprv, xpub
|
||||
|
||||
|
||||
def bip32_public_derivation(xpub, branch, sequence):
    """Derive the xpub at path `sequence` (non-hardened only), extending `branch`."""
    xtype, depth, fingerprint, child_number, c, cK = deserialize_xpub(xpub)
    if not sequence.startswith(branch):
        raise ValueError('incompatible branch ({}) and sequence ({})'
                         .format(branch, sequence))
    sequence = sequence[len(branch):]
    for n in sequence.split('/'):
        if n == '': continue
        # int(n) only: hardened components are impossible from an xpub
        i = int(n)
        parent_cK = cK
        cK, c = CKD_pub(cK, c, i)
        depth += 1
    fingerprint = hash_160(parent_cK)[0:4]
    child_number = bfh("%08X"%i)
    return serialize_xpub(xtype, c, cK, depth, fingerprint, child_number)
|
||||
|
||||
|
||||
def bip32_private_key(sequence, k, chain):
    """Derive the private key reached by following `sequence` (child indices) from (k, chain)."""
    for child_index in sequence:
        k, chain = CKD_priv(k, chain, child_index)
    return k
|
||||
@ -1,552 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2011 thomasv@gitorious
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import hashlib
|
||||
from typing import List, Tuple, TYPE_CHECKING, Optional, Union
|
||||
|
||||
from .util import bfh, bh2u, BitcoinException, assert_bytes, to_bytes, inv_dict
|
||||
from . import version
|
||||
from . import segwit_addr
|
||||
from . import constants
|
||||
from . import ecc
|
||||
from .crypto import sha256d, sha256, hash_160, hmac_oneshot
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .network import Network
|
||||
|
||||
|
||||
################################## transactions
|
||||
|
||||
COINBASE_MATURITY = 100
|
||||
COIN = 100000000
|
||||
TOTAL_COIN_SUPPLY_LIMIT_IN_BTC = 21000000
|
||||
|
||||
# supported types of transaction outputs
|
||||
TYPE_ADDRESS = 0
|
||||
TYPE_PUBKEY = 1
|
||||
TYPE_SCRIPT = 2
|
||||
|
||||
|
||||
def rev_hex(s: str) -> str:
    """Return hex string `s` with its byte order reversed (hex -> hex)."""
    reversed_bytes = bfh(s)[::-1]
    return bh2u(reversed_bytes)
|
||||
|
||||
|
||||
def int_to_hex(i: int, length: int=1) -> str:
    """Converts int to little-endian hex string.
    `length` is the number of bytes available

    Negative values are encoded in two's complement; raises OverflowError
    if `i` does not fit in `length` bytes.
    """
    if not isinstance(i, int):
        raise TypeError('{} instead of int'.format(i))
    range_size = pow(256, length)
    if i < -(range_size//2) or i >= range_size:
        raise OverflowError('cannot convert int {} to hex ({} bytes)'.format(i, length))
    if i < 0:
        # two's complement
        i = range_size + i
    s = hex(i)[2:].rstrip('L')
    # zero-pad to exactly `length` bytes before reversing to little-endian
    s = "0"*(2*length - len(s)) + s
    return rev_hex(s)
|
||||
|
||||
def script_num_to_hex(i: int) -> str:
    """See CScriptNum in Bitcoin Core.
    Encodes an integer as hex, to be used in script.

    ported from https://github.com/bitcoin/bitcoin/blob/8cbc5c4be4be22aca228074f087a374a7ec38be8/src/script/script.h#L326
    """
    if i == 0:
        return ''

    result = bytearray()
    neg = i < 0
    absvalue = abs(i)
    # little-endian magnitude bytes
    while absvalue > 0:
        result.append(absvalue & 0xff)
        absvalue >>= 8

    # the high bit of the last byte is the sign bit; add a byte if it is taken
    if result[-1] & 0x80:
        result.append(0x80 if neg else 0x00)
    elif neg:
        result[-1] |= 0x80

    return bh2u(result)
|
||||
|
||||
|
||||
def var_int(i: int) -> str:
    """Encode `i` as a Bitcoin variable-length integer (hex).
    https://en.bitcoin.it/wiki/Protocol_specification#Variable_length_integer
    """
    if i < 0xfd:
        return int_to_hex(i)
    if i <= 0xffff:
        return "fd" + int_to_hex(i, 2)
    if i <= 0xffffffff:
        return "fe" + int_to_hex(i, 4)
    return "ff" + int_to_hex(i, 8)
|
||||
|
||||
|
||||
def witness_push(item: str) -> str:
    """Returns data in the form it should be present in the witness.
    hex -> hex
    """
    byte_len = len(item) // 2
    return var_int(byte_len) + item
|
||||
|
||||
|
||||
def op_push(i: int) -> str:
    """Hex opcode(s) that push `i` bytes of data in script (length prefix only)."""
    if i<0x4c:  # 0x4c == OP_PUSHDATA1; below it the length byte itself is the opcode
        return int_to_hex(i)
    elif i<=0xff:
        return '4c' + int_to_hex(i)       # OP_PUSHDATA1
    elif i<=0xffff:
        return '4d' + int_to_hex(i,2)     # OP_PUSHDATA2
    else:
        return '4e' + int_to_hex(i,4)     # OP_PUSHDATA4
|
||||
|
||||
|
||||
def push_script(data: str) -> str:
    """Returns pushed data to the script, automatically
    choosing canonical opcodes depending on the length of the data.
    hex -> hex

    ported from https://github.com/btcsuite/btcd/blob/fdc2bc867bda6b351191b5872d2da8270df00d13/txscript/scriptbuilder.go#L128
    """
    data = bfh(data)
    from .transaction import opcodes

    data_len = len(data)

    # "small integer" opcodes
    if data_len == 0 or data_len == 1 and data[0] == 0:
        return bh2u(bytes([opcodes.OP_0]))
    elif data_len == 1 and data[0] <= 16:
        return bh2u(bytes([opcodes.OP_1 - 1 + data[0]]))
    elif data_len == 1 and data[0] == 0x81:
        # 0x81 is -1 in script-number encoding
        return bh2u(bytes([opcodes.OP_1NEGATE]))

    return op_push(data_len) + bh2u(data)
|
||||
|
||||
|
||||
def add_number_to_script(i: int) -> bytes:
    """Canonical script push of integer `i`, as bytes."""
    encoded = script_num_to_hex(i)
    return bfh(push_script(encoded))
|
||||
|
||||
|
||||
def relayfee(network: 'Network'=None) -> int:
    """Minimum relay fee (sat/kB), from the network if known, capped at MAX_RELAY_FEE."""
    from .simple_config import FEERATE_DEFAULT_RELAY
    MAX_RELAY_FEE = 50000
    # fall back to the default when no network or the server reported nothing
    f = network.relay_fee if network and network.relay_fee else FEERATE_DEFAULT_RELAY
    return min(f, MAX_RELAY_FEE)
|
||||
|
||||
|
||||
def dust_threshold(network: 'Network'=None) -> int:
    """Dust limit in satoshis, derived from the relay fee.

    Change <= dust threshold is added to the tx fee.
    182 bytes is the size assumed for a typical spendable output+input.
    """
    return 182 * 3 * relayfee(network) // 1000
|
||||
|
||||
|
||||
def hash_encode(x: bytes) -> str:
    """Hex-encode `x` with byte order reversed."""
    reversed_bytes = x[::-1]
    return bh2u(reversed_bytes)
|
||||
|
||||
|
||||
def hash_decode(x: str) -> bytes:
    """Decode hex string `x` and reverse the byte order (inverse of hash_encode)."""
    decoded = bfh(x)
    return decoded[::-1]
|
||||
|
||||
|
||||
################################## electrum seeds
|
||||
|
||||
|
||||
def is_new_seed(x: str, prefix=version.SEED_PREFIX) -> bool:
    """True iff `x` is a new-style electrum seed whose version hash starts with `prefix`."""
    from . import mnemonic
    x = mnemonic.normalize_text(x)
    s = bh2u(hmac_oneshot(b"Seed version", x.encode('utf8'), hashlib.sha512))
    return s.startswith(prefix)
|
||||
|
||||
|
||||
def is_old_seed(seed: str) -> bool:
    """Return True if ``seed`` looks like a pre-2.0 Electrum seed.

    Accepts either 12/24 words from the legacy wordlist, or 16/32 bytes
    of hex.  Checks are deliberately left weak for legacy reasons (#3149).
    """
    from . import old_mnemonic, mnemonic
    seed = mnemonic.normalize_text(seed)
    words = seed.split()
    try:
        old_mnemonic.mn_decode(words)
    except Exception:
        uses_electrum_words = False
    else:
        uses_electrum_words = True
    try:
        raw = bfh(seed)
    except Exception:
        is_hex = False
    else:
        is_hex = len(raw) == 16 or len(raw) == 32
    return is_hex or (uses_electrum_words and (len(words) == 12 or len(words) == 24))
|
||||
|
||||
|
||||
def seed_type(x: str) -> str:
    """Classify a seed phrase; return '' if it is not a recognized seed.

    Possible results: 'old', 'standard', 'segwit', '2fa', '2fa_segwit'.
    The legacy ('old') check takes precedence over the prefix checks.
    """
    if is_old_seed(x):
        return 'old'
    prefix_to_type = (
        (version.SEED_PREFIX, 'standard'),
        (version.SEED_PREFIX_SW, 'segwit'),
        (version.SEED_PREFIX_2FA, '2fa'),
        (version.SEED_PREFIX_2FA_SW, '2fa_segwit'),
    )
    for prefix, name in prefix_to_type:
        if is_new_seed(x, prefix):
            return name
    return ''
|
||||
|
||||
|
||||
def is_seed(x: str) -> bool:
    """Return True if ``x`` is any recognized seed type (old or new)."""
    return bool(seed_type(x))
|
||||
|
||||
|
||||
def is_any_2fa_seed_type(seed_type):
    """Return True if ``seed_type`` names one of the two-factor seed variants."""
    return seed_type in ('2fa', '2fa_segwit')
|
||||
|
||||
|
||||
############ functions from pywallet #####################
|
||||
|
||||
def hash160_to_b58_address(h160: bytes, addrtype: int) -> str:
    """Base58Check-encode a hash160 with version byte ``addrtype`` into an address."""
    payload = bytes([addrtype]) + h160
    checksum = sha256d(payload)[0:4]
    return base_encode(payload + checksum, base=58)
|
||||
|
||||
|
||||
def b58_address_to_hash160(addr: str) -> Tuple[int, bytes]:
    """Decode a base58 address into (version_byte, hash160).

    Note: base_decode(..., 25, ...) returns None on length mismatch, in
    which case the indexing below raises TypeError.
    """
    raw = base_decode(to_bytes(addr, 'ascii'), 25, base=58)
    return raw[0], raw[1:21]
|
||||
|
||||
|
||||
def hash160_to_p2pkh(h160: bytes, *, net=None) -> str:
    """Encode a hash160 as a base58 P2PKH address (net defaults to constants.net)."""
    if net is None: net = constants.net
    return hash160_to_b58_address(h160, net.ADDRTYPE_P2PKH)
|
||||
|
||||
def hash160_to_p2sh(h160: bytes, *, net=None) -> str:
    """Encode a hash160 as a base58 P2SH address (net defaults to constants.net)."""
    if net is None: net = constants.net
    return hash160_to_b58_address(h160, net.ADDRTYPE_P2SH)
|
||||
|
||||
def public_key_to_p2pkh(public_key: bytes, *, net=None) -> str:
    """P2PKH address for a raw public key (hash160 of the key, base58-encoded)."""
    if net is None: net = constants.net
    return hash160_to_p2pkh(hash_160(public_key), net=net)
|
||||
|
||||
def hash_to_segwit_addr(h: bytes, witver: int, *, net=None) -> str:
    """Bech32-encode witness program ``h`` with witness version ``witver``."""
    if net is None: net = constants.net
    return segwit_addr.encode(net.SEGWIT_HRP, witver, h)
|
||||
|
||||
def public_key_to_p2wpkh(public_key: bytes, *, net=None) -> str:
    """Native segwit (v0) P2WPKH address for a raw public key."""
    if net is None: net = constants.net
    return hash_to_segwit_addr(hash_160(public_key), witver=0, net=net)
|
||||
|
||||
def script_to_p2wsh(script: str, *, net=None) -> str:
    """Native segwit (v0) P2WSH address: bech32 of sha256(witness script)."""
    if net is None: net = constants.net
    return hash_to_segwit_addr(sha256(bfh(script)), witver=0, net=net)
|
||||
|
||||
def p2wpkh_nested_script(pubkey: str) -> str:
    """Redeem script for P2WPKH-in-P2SH: OP_0 <push hash160(pubkey)>, as hex."""
    pkh = bh2u(hash_160(bfh(pubkey)))
    return '00' + push_script(pkh)
|
||||
|
||||
def p2wsh_nested_script(witness_script: str) -> str:
    """Redeem script for P2WSH-in-P2SH: OP_0 <push sha256(witness_script)>, as hex."""
    wsh = bh2u(sha256(bfh(witness_script)))
    return '00' + push_script(wsh)
|
||||
|
||||
def pubkey_to_address(txin_type: str, pubkey: str, *, net=None) -> str:
    """Derive the address of ``pubkey`` (hex) for the given script type.

    Supported txin_type values: 'p2pkh', 'p2wpkh', 'p2wpkh-p2sh'.
    Raises NotImplementedError for anything else.
    """
    if net is None:
        net = constants.net
    if txin_type == 'p2pkh':
        return public_key_to_p2pkh(bfh(pubkey), net=net)
    if txin_type == 'p2wpkh':
        return public_key_to_p2wpkh(bfh(pubkey), net=net)
    if txin_type == 'p2wpkh-p2sh':
        nested = p2wpkh_nested_script(pubkey)
        return hash160_to_p2sh(hash_160(bfh(nested)), net=net)
    raise NotImplementedError(txin_type)
|
||||
|
||||
def redeem_script_to_address(txin_type: str, redeem_script: str, *, net=None) -> str:
    """Derive the address of a redeem/witness script (hex) for the given type.

    Supported txin_type values: 'p2sh', 'p2wsh', 'p2wsh-p2sh'.
    Raises NotImplementedError for anything else.
    """
    if net is None:
        net = constants.net
    if txin_type == 'p2sh':
        return hash160_to_p2sh(hash_160(bfh(redeem_script)), net=net)
    if txin_type == 'p2wsh':
        return script_to_p2wsh(redeem_script, net=net)
    if txin_type == 'p2wsh-p2sh':
        nested = p2wsh_nested_script(redeem_script)
        return hash160_to_p2sh(hash_160(bfh(nested)), net=net)
    raise NotImplementedError(txin_type)
|
||||
|
||||
|
||||
def script_to_address(script: str, *, net=None) -> str:
    """Convert an output script (hex) to its address; asserts it has one."""
    from .transaction import get_address_from_output_script
    script_type, address = get_address_from_output_script(bfh(script), net=net)
    assert script_type == TYPE_ADDRESS
    return address
|
||||
|
||||
def address_to_script(addr: str, *, net=None) -> str:
    """Convert an address (bech32 or base58) to its output script, as hex.

    Raises BitcoinException on invalid addresses and unknown version bytes.
    """
    if net is None: net = constants.net
    if not is_address(addr, net=net):
        raise BitcoinException(f"invalid bitcoin address: {addr}")
    witver, witprog = segwit_addr.decode(net.SEGWIT_HRP, addr)
    if witprog is not None:
        # native segwit output: OP_n <push witness program>
        if not (0 <= witver <= 16):
            raise BitcoinException(f'impossible witness version: {witver}')
        OP_n = witver + 0x50 if witver > 0 else 0
        script = bh2u(bytes([OP_n]))
        script += push_script(bh2u(bytes(witprog)))
        return script
    addrtype, hash_160_ = b58_address_to_hash160(addr)
    if addrtype == net.ADDRTYPE_P2PKH:
        script = '76a9'  # op_dup, op_hash_160
        script += push_script(bh2u(hash_160_))
        script += '88ac'  # op_equalverify, op_checksig
    elif addrtype == net.ADDRTYPE_P2SH:
        script = 'a9'  # op_hash_160
        script += push_script(bh2u(hash_160_))
        script += '87'  # op_equal
    else:
        raise BitcoinException(f'unknown address type: {addrtype}')
    return script
|
||||
|
||||
def address_to_scripthash(addr: str) -> str:
    """Electrum-protocol scripthash of an address's output script."""
    script = address_to_script(addr)
    return script_to_scripthash(script)
|
||||
|
||||
def script_to_scripthash(script: str) -> str:
    """Electrum-protocol scripthash: sha256 of the raw script, reversed, hex."""
    digest = sha256(bfh(script))[0:32]
    return bh2u(digest[::-1])
|
||||
|
||||
def public_key_to_p2pk_script(pubkey: str) -> str:
    """Build a pay-to-pubkey output script (hex): <push pubkey> OP_CHECKSIG."""
    return push_script(pubkey) + 'ac'  # 'ac' == OP_CHECKSIG
|
||||
|
||||
# Base58 alphabet (Bitcoin): omits 0, O, I, l to avoid visual ambiguity.
__b58chars = b'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
assert len(__b58chars) == 58

# Base43 alphabet: used by Electrum for compact QR transaction encoding.
__b43chars = b'0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ$*+-./:'
assert len(__b43chars) == 43
|
||||
|
||||
|
||||
def base_encode(v: bytes, base: int) -> str:
    """Encode bytes ``v`` to a base58 (or base43) string.

    Leading zero bytes are preserved as leading first-alphabet characters
    ('1' for base58), mirroring Bitcoin's convention.
    """
    assert_bytes(v)
    if base not in (58, 43):
        raise ValueError('not supported base: {}'.format(base))
    chars = __b58chars
    if base == 43:
        chars = __b43chars
    # interpret v as a big-endian integer
    long_value = 0
    for (i, c) in enumerate(v[::-1]):
        long_value += (256**i) * c
    # repeatedly divide by the base, collecting digits least-significant first
    result = bytearray()
    while long_value >= base:
        div, mod = divmod(long_value, base)
        result.append(chars[mod])
        long_value = div
    result.append(chars[long_value])
    # Bitcoin does a little leading-zero-compression:
    # leading 0-bytes in the input become leading-1s
    nPad = 0
    for c in v:
        if c == 0x00:
            nPad += 1
        else:
            break
    result.extend([chars[0]] * nPad)
    result.reverse()
    return result.decode('ascii')
|
||||
|
||||
|
||||
def base_decode(v: Union[bytes, str], length: Optional[int], base: int) -> Optional[bytes]:
    """Decode a base58/base43 string into bytes.

    Returns None when ``length`` is given and the decoded payload has a
    different length; raises ValueError on a character outside the alphabet.
    """
    # assert_bytes(v)
    v = to_bytes(v, 'ascii')
    if base not in (58, 43):
        raise ValueError('not supported base: {}'.format(base))
    chars = __b58chars
    if base == 43:
        chars = __b43chars
    # accumulate the big integer value of the digits
    long_value = 0
    for (i, c) in enumerate(v[::-1]):
        digit = chars.find(bytes([c]))
        if digit == -1:
            raise ValueError('Forbidden character {} for base {}'.format(c, base))
        long_value += digit * (base**i)
    # emit bytes least-significant first
    result = bytearray()
    while long_value >= 256:
        div, mod = divmod(long_value, 256)
        result.append(mod)
        long_value = div
    result.append(long_value)
    # leading alphabet-zero characters map back to leading 0x00 bytes
    nPad = 0
    for c in v:
        if c == chars[0]:
            nPad += 1
        else:
            break
    result.extend(b'\x00' * nPad)
    if length is not None and len(result) != length:
        return None
    result.reverse()
    return bytes(result)
|
||||
|
||||
|
||||
class InvalidChecksum(Exception):
    """Raised when a Base58Check payload's 4-byte checksum does not match."""
    pass
|
||||
|
||||
|
||||
def EncodeBase58Check(vchIn: bytes) -> str:
    """Base58Check: append the first 4 bytes of sha256d(payload), then base58-encode."""
    checksum = sha256d(vchIn)[0:4]
    return base_encode(vchIn + checksum, base=58)
|
||||
|
||||
|
||||
def DecodeBase58Check(psz: Union[bytes, str]) -> bytes:
    """Decode a Base58Check string; raise InvalidChecksum on checksum mismatch."""
    decoded = base_decode(psz, None, base=58)
    payload = decoded[0:-4]
    csum_found = decoded[-4:]
    csum_calculated = sha256d(payload)[0:4]
    if csum_calculated != csum_found:
        raise InvalidChecksum(f'calculated {bh2u(csum_calculated)}, found {bh2u(csum_found)}')
    return payload
|
||||
|
||||
|
||||
# backwards compat
# extended WIF for segwit (used in 3.0.x; but still used internally)
# the keys in this dict should be a superset of what Imported Wallets can import
# Maps script-type name -> offset added to the network WIF prefix byte.
WIF_SCRIPT_TYPES = {
    'p2pkh':0,
    'p2wpkh':1,
    'p2wpkh-p2sh':2,
    'p2sh':5,
    'p2wsh':6,
    'p2wsh-p2sh':7
}
# Inverse mapping: prefix offset -> script-type name.
WIF_SCRIPT_TYPES_INV = inv_dict(WIF_SCRIPT_TYPES)
|
||||
|
||||
|
||||
|
||||
def serialize_privkey(secret: bytes, compressed: bool, txin_type: str,
                      internal_use: bool=False) -> str:
    """Serialize a raw 32-byte secret to WIF.

    With internal_use=True the legacy 3.0.x scheme is used (script type
    folded into the prefix byte); otherwise the result is 'txin_type:wif'.
    """
    # we only export secrets inside curve range
    secret = ecc.ECPrivkey.normalize_secret_bytes(secret)
    if internal_use:
        prefix = bytes([(WIF_SCRIPT_TYPES[txin_type] + constants.net.WIF_PREFIX) & 255])
    else:
        prefix = bytes([constants.net.WIF_PREFIX])
    # trailing 0x01 marks a compressed pubkey in standard WIF
    suffix = b'\01' if compressed else b''
    vchIn = prefix + secret + suffix
    base58_wif = EncodeBase58Check(vchIn)
    if internal_use:
        return base58_wif
    else:
        return '{}:{}'.format(txin_type, base58_wif)
|
||||
|
||||
|
||||
def deserialize_privkey(key: str) -> Tuple[str, bytes, bool]:
    """Parse a private key string into (txin_type, secret_bytes, compressed).

    Accepts minikeys, 'txin_type:wif' strings, and bare WIF (including the
    legacy 3.0.x form that encodes the script type in the prefix byte).
    Raises BitcoinException on malformed input.
    """
    if is_minikey(key):
        return 'p2pkh', minikey_to_private_key(key), False

    txin_type = None
    if ':' in key:
        # modern 'txin_type:wif' format
        txin_type, key = key.split(sep=':', maxsplit=1)
        if txin_type not in WIF_SCRIPT_TYPES:
            raise BitcoinException('unknown script type: {}'.format(txin_type))
    try:
        vch = DecodeBase58Check(key)
    except BaseException:
        # avoid echoing the full secret into the error message
        neutered_privkey = str(key)[:3] + '..' + str(key)[-2:]
        raise BitcoinException("cannot deserialize privkey {}"
                               .format(neutered_privkey))

    if txin_type is None:
        # keys exported in version 3.0.x encoded script type in first byte
        prefix_value = vch[0] - constants.net.WIF_PREFIX
        try:
            txin_type = WIF_SCRIPT_TYPES_INV[prefix_value]
        except KeyError:
            raise BitcoinException('invalid prefix ({}) for WIF key (1)'.format(vch[0]))
    else:
        # all other keys must have a fixed first byte
        if vch[0] != constants.net.WIF_PREFIX:
            raise BitcoinException('invalid prefix ({}) for WIF key (2)'.format(vch[0]))

    if len(vch) not in [33, 34]:
        raise BitcoinException('invalid vch len for WIF key: {}'.format(len(vch)))
    # 34 bytes == prefix + 32-byte secret + compression marker
    compressed = len(vch) == 34
    secret_bytes = vch[1:33]
    # we accept secrets outside curve range; cast into range here:
    secret_bytes = ecc.ECPrivkey.normalize_secret_bytes(secret_bytes)
    return txin_type, secret_bytes, compressed
|
||||
|
||||
|
||||
def is_compressed_privkey(sec: str) -> bool:
    """Return whether the WIF key ``sec`` encodes a compressed pubkey."""
    return deserialize_privkey(sec)[2]
|
||||
|
||||
|
||||
def address_from_private_key(sec: str) -> str:
    """Derive the address corresponding to a serialized private key."""
    txin_type, privkey, compressed = deserialize_privkey(sec)
    pubkey_hex = ecc.ECPrivkey(privkey).get_public_key_hex(compressed=compressed)
    return pubkey_to_address(txin_type, pubkey_hex)
|
||||
|
||||
def is_segwit_address(addr: str, *, net=None) -> bool:
    """Return True if ``addr`` is a valid bech32 segwit address for ``net``."""
    if net is None:
        net = constants.net
    try:
        _witver, witprog = segwit_addr.decode(net.SEGWIT_HRP, addr)
    except Exception:
        return False
    return witprog is not None
|
||||
|
||||
def is_b58_address(addr: str, *, net=None) -> bool:
    """Return True if ``addr`` is a valid base58 (p2pkh/p2sh) address for ``net``.

    Re-encodes the decoded payload and compares, which validates the
    checksum and rejects non-canonical encodings.
    """
    if net is None:
        net = constants.net
    try:
        addrtype, h160 = b58_address_to_hash160(addr)
    except Exception:
        return False
    if addrtype not in [net.ADDRTYPE_P2PKH, net.ADDRTYPE_P2SH]:
        return False
    return addr == hash160_to_b58_address(h160, addrtype)
|
||||
|
||||
def is_address(addr: str, *, net=None) -> bool:
    """Return True if ``addr`` is a valid segwit or base58 address for ``net``."""
    if net is None: net = constants.net
    return is_segwit_address(addr, net=net) \
           or is_b58_address(addr, net=net)
|
||||
|
||||
|
||||
def is_private_key(key: str) -> bool:
    """Return True if ``key`` deserializes as a private key (WIF or minikey).

    Any parse failure is treated as "not a private key".
    """
    try:
        k = deserialize_privkey(key)
        return k is not False
    except Exception:
        # was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit
        # are no longer silently swallowed
        return False
|
||||
|
||||
|
||||
########### end pywallet functions #######################
|
||||
|
||||
def is_minikey(text: str) -> bool:
    """Return True if ``text`` is a valid Casascius-style minikey."""
    # Minikeys are typically 22 or 30 characters, but this routine
    # permits any length of 20 or more provided the minikey is valid.
    # A valid minikey must begin with an 'S', be in base58, and when
    # suffixed with '?' have its SHA256 hash begin with a zero byte.
    # They are widely used in Casascius physical bitcoins.
    # NOTE(review): relies on the project `sha256` helper accepting str.
    return (len(text) >= 20 and text[0] == 'S'
            and all(ord(c) in __b58chars for c in text)
            and sha256(text + '?')[0] == 0x00)
|
||||
|
||||
def minikey_to_private_key(text: str) -> bytes:
    """Raw private key for a minikey: the sha256 of the minikey text itself."""
    return sha256(text)
|
||||
@ -1,732 +0,0 @@
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2012 thomasv@ecdsa.org
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
import os
|
||||
import threading
|
||||
from typing import Optional, Dict
|
||||
|
||||
from . import util
|
||||
from .bitcoin import hash_encode, int_to_hex, rev_hex
|
||||
from .crypto import sha256d
|
||||
from . import constants
|
||||
from .util import bfh, bh2u
|
||||
from .simple_config import SimpleConfig
|
||||
|
||||
# Optional scrypt implementation used for this chain's proof-of-work hash.
try:
    import pylibscrypt
    # scrypt(N=1024, r=1, p=1), 32-byte output, header as both password and salt
    getPoWHash = lambda x: pylibscrypt.scrypt(password=x, salt=x, N=1024, r=1, p=1, olen=32)
except ImportError:
    # NOTE(review): getPoWHash remains undefined on this path; calling
    # pow_hash_header without pylibscrypt installed raises NameError.
    util.print_msg("Warning: package pylibscrypt not available")


HEADER_SIZE = 80  # bytes
# Easiest allowed proof-of-work target for this chain.
MAX_TARGET = 0x00000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
|
||||
|
||||
|
||||
class MissingHeader(Exception):
    """Raised when a required header is not available in any known chain."""
    pass

class InvalidHeader(Exception):
    """Raised when raw header bytes cannot be parsed."""
    pass
|
||||
|
||||
def serialize_header(header_dict: dict) -> str:
    """Serialize a header dict to its 80-byte wire format, as hex."""
    parts = [
        int_to_hex(header_dict['version'], 4),
        rev_hex(header_dict['prev_block_hash']),
        rev_hex(header_dict['merkle_root']),
        int_to_hex(int(header_dict['timestamp']), 4),
        int_to_hex(int(header_dict['bits']), 4),
        int_to_hex(int(header_dict['nonce']), 4),
    ]
    return ''.join(parts)
|
||||
|
||||
def deserialize_header(s: bytes, height: int) -> dict:
    """Parse 80 raw header bytes into a dict; raise InvalidHeader on bad input."""
    if not s:
        raise InvalidHeader('Invalid header: {}'.format(s))
    if len(s) != HEADER_SIZE:
        raise InvalidHeader('Invalid header length: {}'.format(len(s)))

    def hex_to_int(b):
        # all integer fields are little-endian on the wire
        return int.from_bytes(b, byteorder='little')

    return {
        'version': hex_to_int(s[0:4]),
        'prev_block_hash': hash_encode(s[4:36]),
        'merkle_root': hash_encode(s[36:68]),
        'timestamp': hex_to_int(s[68:72]),
        'bits': hex_to_int(s[72:76]),
        'nonce': hex_to_int(s[76:80]),
        'block_height': height,
    }
|
||||
|
||||
def hash_header(header: dict) -> str:
    """Block hash (hex) of a header dict; all-zero hash when header is None."""
    if header is None:
        return '0' * 64
    if header.get('prev_block_hash') is None:
        # NOTE(review): mutates the caller's dict in place
        header['prev_block_hash'] = '00'*32
    return hash_raw_header(serialize_header(header))
|
||||
|
||||
|
||||
def hash_raw_header(header: str) -> str:
    """Double-SHA256 of a serialized header: hex in, reversed-hex hash out."""
    return hash_encode(sha256d(bfh(header)))
|
||||
|
||||
|
||||
# key: blockhash hex at forkpoint
# the chain at some key is the best chain that includes the given hash
blockchains = {}  # type: Dict[str, Blockchain]
# guards mutation of the `blockchains` dict
blockchains_lock = threading.RLock()
|
||||
|
||||
|
||||
def read_blockchains(config: 'SimpleConfig'):
    """Load the main chain and all saved fork chains from disk into the
    global `blockchains` dict, deleting fork files that fail checks."""
    best_chain = Blockchain(config=config,
                            forkpoint=0,
                            parent=None,
                            forkpoint_hash=constants.net.GENESIS,
                            prev_hash=None)
    blockchains[constants.net.GENESIS] = best_chain
    # consistency checks
    if best_chain.height() > constants.net.max_checkpoint():
        header_after_cp = best_chain.read_header(constants.net.max_checkpoint()+1)
        if not header_after_cp or not best_chain.can_connect(header_after_cp, check_height=False):
            util.print_error("[blockchain] deleting best chain. cannot connect header after last cp to last cp.")
            os.unlink(best_chain.path())
            best_chain.update_size()
    # forks
    fdir = os.path.join(util.get_headers_dir(config), 'forks')
    util.make_dir(fdir)
    # files are named as: fork2_{forkpoint}_{prev_hash}_{first_hash}
    l = filter(lambda x: x.startswith('fork2_') and '.' not in x, os.listdir(fdir))
    l = sorted(l, key=lambda x: int(x.split('_')[1]))  # sort by forkpoint

    def delete_chain(filename, reason):
        # remove a fork file that failed validation
        util.print_error(f"[blockchain] deleting chain (unknown): {reason}")
        os.unlink(os.path.join(fdir, filename))

    def instantiate_chain(filename):
        # parse metadata back out of the fork filename
        __, forkpoint, prev_hash, first_hash = filename.split('_')
        forkpoint = int(forkpoint)
        prev_hash = (64-len(prev_hash)) * "0" + prev_hash  # left-pad with zeroes
        first_hash = (64-len(first_hash)) * "0" + first_hash
        # forks below the max checkpoint are not allowed
        if forkpoint <= constants.net.max_checkpoint():
            delete_chain(filename, "deleting fork below max checkpoint")
            return
        # find parent (sorting by forkpoint guarantees it's already instantiated)
        for parent in blockchains.values():
            if parent.check_hash(forkpoint - 1, prev_hash):
                break
        else:
            delete_chain(filename, "cannot find parent for chain")
            return
        b = Blockchain(config=config,
                       forkpoint=forkpoint,
                       parent=parent,
                       forkpoint_hash=first_hash,
                       prev_hash=prev_hash)
        # consistency checks
        h = b.read_header(b.forkpoint)
        if first_hash != hash_header(h):
            delete_chain(filename, "incorrect first hash for chain")
            return
        if not b.parent.can_connect(h, check_height=False):
            delete_chain(filename, "cannot connect chain to parent")
            return
        chain_id = b.get_id()
        assert first_hash == chain_id, (first_hash, chain_id)
        blockchains[chain_id] = b

    for filename in l:
        instantiate_chain(filename)
|
||||
|
||||
|
||||
def pow_hash_header(header):
    """Scrypt proof-of-work hash (hex) of a header dict.

    Requires pylibscrypt; otherwise getPoWHash is undefined (NameError).
    """
    return hash_encode(getPoWHash(bfh(serialize_header(header))))
|
||||
|
||||
|
||||
def get_best_chain() -> 'Blockchain':
    """Return the chain rooted at the genesis block (the main chain)."""
    return blockchains[constants.net.GENESIS]
|
||||
|
||||
# block hash -> chain work; up to and including that block
_CHAINWORK_CACHE = {
    "0000000000000000000000000000000000000000000000000000000000000000": 0,  # virtual block at height -1
}  # type: Dict[str, int]
|
||||
|
||||
|
||||
class Blockchain(util.PrintError):
|
||||
"""
|
||||
Manages blockchain headers and their verification
|
||||
"""
|
||||
|
||||
def __init__(self, config: SimpleConfig, forkpoint: int, parent: Optional['Blockchain'],
             forkpoint_hash: str, prev_hash: Optional[str]):
    """Create a chain starting at height ``forkpoint`` whose first block
    hash is ``forkpoint_hash``; ``prev_hash`` is the hash just before it.
    """
    assert isinstance(forkpoint_hash, str) and len(forkpoint_hash) == 64, forkpoint_hash
    assert (prev_hash is None) or (isinstance(prev_hash, str) and len(prev_hash) == 64), prev_hash
    # assert (parent is None) == (forkpoint == 0)
    if 0 < forkpoint <= constants.net.max_checkpoint():
        raise Exception(f"cannot fork below max checkpoint. forkpoint: {forkpoint}")
    self.config = config
    self.forkpoint = forkpoint  # height of first header
    self.parent = parent
    self._forkpoint_hash = forkpoint_hash  # blockhash at forkpoint. "first hash"
    self._prev_hash = prev_hash  # blockhash immediately before forkpoint
    self.lock = threading.RLock()
    self.update_size()
|
||||
|
||||
def with_lock(func):
    # Method decorator: run `func` while holding self.lock (reentrant).
    def func_wrapper(self, *args, **kwargs):
        with self.lock:
            return func(self, *args, **kwargs)
    return func_wrapper
|
||||
|
||||
@property
def checkpoints(self):
    """Hard-coded checkpoint list for the active network."""
    return constants.net.CHECKPOINTS
|
||||
|
||||
def get_max_child(self) -> Optional[int]:
    """Return the highest forkpoint among this chain's direct children, or None."""
    with blockchains_lock:
        chains = list(blockchains.values())
    children = [chain for chain in chains if chain.parent == self]
    if not children:
        return None
    return max(child.forkpoint for child in children)
|
||||
|
||||
def get_max_forkpoint(self) -> int:
    """Returns the max height where there is a fork related to this chain."""
    max_child = self.get_max_child()
    return self.forkpoint if max_child is None else max_child
|
||||
|
||||
@with_lock
def get_branch_size(self) -> int:
    """Number of headers from the highest forkpoint to the tip, inclusive."""
    return self.height() - self.get_max_forkpoint() + 1
|
||||
|
||||
def get_name(self) -> str:
    """Short chain label: first 10 hex chars (zeros stripped) of the hash
    at the max forkpoint."""
    return self.get_hash(self.get_max_forkpoint()).lstrip('0')[0:10]
|
||||
|
||||
def check_header(self, header: dict) -> bool:
    """Return True if ``header`` matches this chain at its claimed height."""
    claimed_height = header.get('block_height')
    return self.check_hash(claimed_height, hash_header(header))
|
||||
|
||||
def check_hash(self, height: int, header_hash: str) -> bool:
    """Returns whether the hash of the block at given height
    is the given hash.  Any lookup failure counts as a mismatch.
    """
    assert isinstance(header_hash, str) and len(header_hash) == 64, header_hash  # hex
    try:
        return header_hash == self.get_hash(height)
    except Exception:
        return False
|
||||
|
||||
def fork(parent, header: dict) -> 'Blockchain':
    """Create (and register) a new fork of ``parent`` starting at ``header``.

    NOTE(review): written like an instance method but the first parameter
    is named `parent`; presumably invoked as parent_chain.fork(header).
    """
    if not parent.can_connect(header, check_height=False):
        raise Exception("forking header does not connect to parent chain")
    forkpoint = header.get('block_height')
    self = Blockchain(config=parent.config,
                      forkpoint=forkpoint,
                      parent=parent,
                      forkpoint_hash=hash_header(header),
                      prev_hash=parent.get_hash(forkpoint-1))
    # create an empty headers file for the new fork
    open(self.path(), 'w+').close()
    self.save_header(header)
    # put into global dict. note that in some cases
    # save_header might have already put it there but that's OK
    chain_id = self.get_id()
    with blockchains_lock:
        blockchains[chain_id] = self
    return self
|
||||
|
||||
@with_lock
def height(self) -> int:
    """Height of this chain's tip (forkpoint + header count - 1)."""
    return self.forkpoint + self.size() - 1
|
||||
|
||||
@with_lock
def size(self) -> int:
    """Number of headers stored by this chain (excluding the parent's)."""
    return self._size
|
||||
|
||||
@with_lock
def update_size(self) -> None:
    """Recompute self._size from the headers file size on disk."""
    p = self.path()
    self._size = os.path.getsize(p)//HEADER_SIZE if os.path.exists(p) else 0
|
||||
|
||||
#def pow_hash_header(header):
|
||||
# return hash_encode(getPoWHash(bfh(serialize_header(header))))
|
||||
|
||||
@classmethod
def verify_header(cls, header: dict, prev_hash: str, target: int, expected_header_hash: str=None) -> None:
    """Validate one header: expected hash, prev-hash link, difficulty bits,
    and scrypt proof of work.  Raises on any mismatch; None on success.
    """
    _hash = hash_header(header)
    _powhash = pow_hash_header(header)
    if expected_header_hash and expected_header_hash != _hash:
        raise Exception("hash mismatches with expected: {} vs {}".format(expected_header_hash, _hash))
    if prev_hash != header.get('prev_block_hash'):
        raise Exception("prev hash mismatch: %s vs %s" % (prev_hash, header.get('prev_block_hash')))
    if constants.net.TESTNET:
        return
    # NOTE(review): the next assignment is dead -- it is immediately
    # overwritten by `bits = target`, so `target` is treated as already
    # being in compact-bits form.  Confirm whether target_to_bits was
    # actually intended here.
    bits = cls.target_to_bits(target)
    bits = target
    if bits != header.get('bits'):
        raise Exception("bits mismatch: %s vs %s" % (bits, header.get('bits')))
    # NOTE(review): block_hash_as_num is computed but never used.
    block_hash_as_num = int.from_bytes(bfh(_hash), byteorder='big')
    target_val = cls.bits_to_target(bits)
    if int('0x' + _powhash, 16) > target_val:
        raise Exception("insufficient proof of work: %s vs target %s" % (int('0x' + _hash, 16), target_val))
|
||||
|
||||
def verify_chunk(self, index: int, data: bytes) -> None:
    """Verify a chunk of raw headers (2016 per chunk) starting at height
    index*2016, raising on the first invalid header.

    Maintains a sliding window (headerFirst/headerLast/capture/lst) that
    feeds the retarget logic; the branch at height 426000 presumably
    marks a consensus/difficulty-algorithm change on this chain -- TODO
    confirm against the coin's consensus rules.
    """
    num = len(data) // HEADER_SIZE
    current_header = index * 2016
    # NOTE(review): looks like a leftover debug print
    print('chunk ' + str(index))
    prev_hash = self.get_hash(current_header - 1)
    headerLast = None
    headerFirst = None
    capture = None
    lst = []
    for i in range(num):
        averaging_interval = self.AveragingInterval(current_header)
        difficulty_interval = self.DifficultyAdjustmentInterval(current_header)
        if current_header < 426000:
            target = self.get_target(current_header - 1, headerLast, headerFirst)
            try:
                expected_header_hash = self.get_hash(current_header)
            except MissingHeader:
                expected_header_hash = None
            raw_header = data[i*HEADER_SIZE : (i+1)*HEADER_SIZE]
            header = deserialize_header(raw_header, current_header)
            self.verify_header(header, prev_hash, target, expected_header_hash)
            prev_hash = hash_header(header)
            headerLast = header
            if current_header == 0:
                headerFirst = header
            elif (current_header + averaging_interval + 1) % difficulty_interval == 0:
                # remember the header that will anchor the next retarget window
                capture = header
            if current_header != 0 and current_header % difficulty_interval == 0:
                headerFirst = capture
            if current_header >= 425993:
                # start filling the window used after the 426000 switchover
                lst.append(headerLast)
            current_header = current_header + 1
        else:
            if len(lst)>6:
                headerFirst = lst[0]
            target = self.get_target(current_header - 1, headerLast, headerFirst)
            try:
                expected_header_hash = self.get_hash(current_header)
            except MissingHeader:
                expected_header_hash = None
            raw_header = data[i * HEADER_SIZE: (i + 1) * HEADER_SIZE]
            header = deserialize_header(raw_header, current_header)
            self.verify_header(header, prev_hash, target, expected_header_hash)
            prev_hash = hash_header(header)
            headerLast = header
            lst.append(header)
            if len(lst)>7:
                # keep the sliding window at 7 headers
                lst.pop(0)
            current_header = current_header + 1
|
||||
|
||||
@with_lock
def path(self):
    """Filesystem path of this chain's headers file.

    Main chain lives in 'blockchain_headers'; forks live under 'forks/'
    with their forkpoint and (zero-stripped) hashes in the filename.
    """
    d = util.get_headers_dir(self.config)
    if self.parent is None:
        filename = 'blockchain_headers'
    else:
        assert self.forkpoint > 0, self.forkpoint
        prev_hash = self._prev_hash.lstrip('0')
        first_hash = self._forkpoint_hash.lstrip('0')
        basename = f'fork2_{self.forkpoint}_{prev_hash}_{first_hash}'
        filename = os.path.join('forks', basename)
    return os.path.join(d, filename)
|
||||
|
||||
@with_lock
def save_chunk(self, index: int, chunk: bytes):
    """Persist a chunk of raw headers at chunk-index ``index``, then try
    to swap with the parent chain if this chain became stronger."""
    assert index >= 0, index
    chunk_within_checkpoint_region = index < len(self.checkpoints)
    # chunks in checkpoint region are the responsibility of the 'main chain'
    if chunk_within_checkpoint_region and self.parent is not None:
        main_chain = get_best_chain()
        main_chain.save_chunk(index, chunk)
        return

    delta_height = (index * 2016 - self.forkpoint)
    delta_bytes = delta_height * HEADER_SIZE
    # if this chunk contains our forkpoint, only save the part after forkpoint
    # (the part before is the responsibility of the parent)
    if delta_bytes < 0:
        chunk = chunk[-delta_bytes:]
        delta_bytes = 0
    truncate = not chunk_within_checkpoint_region
    self.write(chunk, delta_bytes, truncate)
    self.swap_with_parent()
|
||||
|
||||
def swap_with_parent(self) -> None:
    """Repeatedly swap this chain with its parent while it is stronger.

    Takes parent lock, own lock, then the global blockchains lock; a
    dummy lock is used when there is no parent.
    """
    parent_lock = self.parent.lock if self.parent is not None else threading.Lock()
    with parent_lock, self.lock, blockchains_lock:  # this order should not deadlock
        # do the swap; possibly multiple ones
        cnt = 0
        while self._swap_with_parent():
            cnt += 1
            if cnt > len(blockchains):  # make sure we are making progress
                raise Exception(f'swapping fork with parent too many times: {cnt}')
|
||||
|
||||
def _swap_with_parent(self) -> bool:
    """Check if this chain became stronger than its parent, and swap
    the underlying files if so. The Blockchain instances will keep
    'containing' the same headers, but their ids change and so
    they will be stored in different files.

    Returns True if a swap happened, False otherwise.  Caller must hold
    the locks taken by swap_with_parent().
    """
    if self.parent is None:
        return False
    if self.parent.get_chainwork() >= self.get_chainwork():
        return False
    self.print_error("swap", self.forkpoint, self.parent.forkpoint)
    parent_branch_size = self.parent.height() - self.forkpoint + 1
    forkpoint = self.forkpoint  # type: Optional[int]
    parent = self.parent  # type: Optional[Blockchain]
    child_old_id = self.get_id()
    parent_old_id = parent.get_id()
    # swap files
    # child takes parent's name
    # parent's new name will be something new (not child's old name)
    self.assert_headers_file_available(self.path())
    child_old_name = self.path()
    with open(self.path(), 'rb') as f:
        my_data = f.read()
    self.assert_headers_file_available(parent.path())
    with open(parent.path(), 'rb') as f:
        f.seek((forkpoint - parent.forkpoint)*HEADER_SIZE)
        parent_data = f.read(parent_branch_size*HEADER_SIZE)
    self.write(parent_data, 0)
    parent.write(my_data, (forkpoint - parent.forkpoint)*HEADER_SIZE)
    # swap parameters
    self.parent, parent.parent = parent.parent, self  # type: Optional[Blockchain], Optional[Blockchain]
    self.forkpoint, parent.forkpoint = parent.forkpoint, self.forkpoint
    self._forkpoint_hash, parent._forkpoint_hash = parent._forkpoint_hash, hash_raw_header(bh2u(parent_data[:HEADER_SIZE]))
    self._prev_hash, parent._prev_hash = parent._prev_hash, self._prev_hash
    # parent's new name
    os.replace(child_old_name, parent.path())
    self.update_size()
    parent.update_size()
    # update pointers
    blockchains.pop(child_old_id, None)
    blockchains.pop(parent_old_id, None)
    blockchains[self.get_id()] = self
    blockchains[parent.get_id()] = parent
    return True
|
||||
|
||||
def get_id(self) -> str:
    """Chain identifier: the block hash at this chain's forkpoint."""
    return self._forkpoint_hash
|
||||
|
||||
def assert_headers_file_available(self, path):
    """Raise FileNotFoundError if the headers file (or its dir) is missing."""
    if os.path.exists(path):
        return
    if not os.path.exists(util.get_headers_dir(self.config)):
        raise FileNotFoundError('Electrum headers_dir does not exist. Was it deleted while running?')
    raise FileNotFoundError('Cannot find headers file but headers_dir is there. Should be at {}'.format(path))
|
||||
|
||||
@with_lock
def write(self, data: bytes, offset: int, truncate: bool=True) -> None:
    """Write raw header bytes at ``offset`` in this chain's file and fsync.

    When ``truncate`` is set and the write is not a pure append, the file
    is cut at ``offset`` first, discarding any stale headers beyond it.
    """
    filename = self.path()
    self.assert_headers_file_available(filename)
    with open(filename, 'rb+') as f:
        if truncate and offset != self._size * HEADER_SIZE:
            f.seek(offset)
            f.truncate()
        f.seek(offset)
        f.write(data)
        f.flush()
        os.fsync(f.fileno())
    self.update_size()
|
||||
|
||||
@with_lock
def save_header(self, header: dict) -> None:
    """Append one verified header at the tip of this chain's file."""
    delta = header.get('block_height') - self.forkpoint
    # headers are only _appended_ to the end:
    assert delta == self.size(), (delta, self.size())
    raw = bfh(serialize_header(header))
    assert len(raw) == HEADER_SIZE
    self.write(raw, delta * HEADER_SIZE)
    self.swap_with_parent()
|
||||
|
||||
@with_lock
def read_header(self, height: int) -> Optional[dict]:
    """Return the deserialized header stored at `height`, or None.

    Heights below this chain's forkpoint are delegated to the parent.
    """
    if height < 0:
        return None
    if height < self.forkpoint:
        return self.parent.read_header(height)
    if height > self.height():
        return None
    delta = height - self.forkpoint
    name = self.path()
    self.assert_headers_file_available(name)
    with open(name, 'rb') as f:
        f.seek(delta * HEADER_SIZE)
        raw = f.read(HEADER_SIZE)
    if len(raw) < HEADER_SIZE:
        raise Exception('Expected to read a full header. This was only {} bytes'.format(len(raw)))
    if raw == bytes(HEADER_SIZE):
        # all-zero slot means the header is not actually stored
        return None
    return deserialize_header(raw, height)
|
||||
|
||||
def header_at_tip(self) -> Optional[dict]:
    """Return the header at the current tip of this chain (may be None)."""
    return self.read_header(self.height())
|
||||
|
||||
def get_hash(self, height: int) -> str:
    """Return the block hash at `height` as a hex string.

    Uses the hard-coded checkpoints where available; otherwise reads
    and hashes the stored header.  Raises MissingHeader if not stored.
    """
    def is_height_checkpoint():
        within_cp_range = height <= constants.net.max_checkpoint()
        at_chunk_boundary = (height + 1) % 2016 == 0
        return within_cp_range and at_chunk_boundary

    if height == -1:
        # conventional "hash" of the non-existent block before genesis
        return '0000000000000000000000000000000000000000000000000000000000000000'
    if height == 0:
        return constants.net.GENESIS
    if is_height_checkpoint():
        cp_hash, _cp_target = self.checkpoints[height // 2016]
        return cp_hash
    header = self.read_header(height)
    if header is None:
        raise MissingHeader(height)
    return hash_header(header)
|
||||
|
||||
def get_target(self, index: int, headerLast: dict = None, headerFirst: dict = None) -> int:
    """Compute the compact difficulty value in effect after chunk `index`.

    Returns 0 on testnet, the bootstrap value for the earliest blocks,
    the checkpointed value where available, and otherwise performs the
    FLO retargeting calculation from the stored headers.
    """
    # compute target from chunk x, used in chunk x+1
    if constants.net.TESTNET:
        return 0
    # The range is first 90 blocks because FLO's block time was 90 blocks when it started
    if -1 <= index <= 88:
        return 0x1e0ffff0
    if index < len(self.checkpoints):
        h, t = self.checkpoints[index]
        return t
    # new target
    if headerLast is None:
        headerLast = self.read_header(index)
    height = headerLast["block_height"]
    # check if the height passed is in range for retargeting
    if (height + 1) % self.DifficultyAdjustmentInterval(height + 1) != 0:
        return int(headerLast["bits"])
    if headerFirst is None:
        averagingInterval = self.AveragingInterval(height + 1)
        blockstogoback = averagingInterval - 1
        if (height + 1) != averagingInterval:
            blockstogoback = averagingInterval
        firstHeight = height - blockstogoback
        headerFirst = self.read_header(int(firstHeight))

    firstBlockTime = headerFirst["timestamp"]
    nMinActualTimespan = int(self.MinActualTimespan(int(headerLast["block_height"]) + 1))
    nMaxActualTimespan = int(self.MaxActualTimespan(int(headerLast["block_height"]) + 1))
    # Limit adjustment step
    nActualTimespan = headerLast["timestamp"] - firstBlockTime
    if nActualTimespan < nMinActualTimespan:
        nActualTimespan = nMinActualTimespan
    if nActualTimespan > nMaxActualTimespan:
        nActualTimespan = nMaxActualTimespan
    # Retarget
    bnNewBits = int(headerLast["bits"])
    bnNew = self.bits_to_target(bnNewBits)
    # FLO: intermediate uint256 can overflow by 1 bit
    # const arith_uint256 bnPowLimit = UintToArith256(params.powLimit);
    fShift = bnNew > MAX_TARGET - 1
    if fShift:
        bnNew = bnNew >> 1
    bnNew = bnNew * nActualTimespan
    # BUGFIX: was true division (`/`), which produced a float.  Targets are
    # around 2**224, far beyond float precision, and `float << 1` raises
    # TypeError on the fShift path.  Floor division matches the integer
    # arith_uint256 math of the reference implementation.
    bnNew = bnNew // self.TargetTimespan(headerLast["block_height"] + 1)
    if fShift:
        bnNew = bnNew << 1
    if bnNew > MAX_TARGET:
        bnNew = MAX_TARGET
    return self.target_to_bits(int(bnNew))
|
||||
|
||||
@classmethod
def bits_to_target(cls, bits: int) -> int:
    """Expand a compact-encoded ("nBits") value into the full 256-bit target."""
    exponent = (bits >> 24) & 0xff
    if not (0x03 <= exponent <= 0x1e):
        raise Exception("First part of bits should be in [0x03, 0x1e]")
    mantissa = bits & 0xffffff
    if not (0x8000 <= mantissa <= 0x7fffff):
        raise Exception("Second part of bits should be in [0x8000, 0x7fffff]")
    return mantissa << (8 * (exponent - 3))
|
||||
|
||||
@classmethod
def target_to_bits(cls, target: int) -> int:
    """Compress a 256-bit target into its compact "nBits" representation."""
    c = ("%064x" % target)[2:]
    # strip leading zero bytes, keeping at least 3 bytes of mantissa
    while c[:2] == '00' and len(c) > 6:
        c = c[2:]
    size = len(c) // 2
    mantissa = int.from_bytes(bfh(c[:6]), byteorder='big')
    if mantissa >= 0x800000:
        # top bit set would read as a sign bit; move one byte into the exponent
        size += 1
        mantissa >>= 8
    return size << 24 | mantissa
|
||||
|
||||
def chainwork_of_header_at_height(self, height: int) -> int:
    """work done by single header at given height"""
    chunk_idx = height // 2016 - 1
    # NOTE(review): get_target's retarget branch returns compact bits while
    # its checkpoint branch returns a full target -- confirm which one the
    # work formula below is meant to receive.
    target = self.get_target(chunk_idx)
    # expected number of hashes to find a block at this target
    return ((2 ** 256 - target - 1) // (target + 1)) + 1
|
||||
|
||||
@with_lock
def get_chainwork(self, height=None) -> int:
    """Return cumulative chainwork of this chain up to `height` (default: tip)."""
    if height is None:
        height = max(0, self.height())
    if constants.net.TESTNET:
        # On testnet/regtest, difficulty works somewhat different.
        # It's out of scope to properly implement that.
        return height
    last_retarget = height // 2016 * 2016 - 1
    cached_height = last_retarget
    # walk back in 2016-block steps until a cached subtotal is found
    while _CHAINWORK_CACHE.get(self.get_hash(cached_height)) is None:
        if cached_height <= -1:
            break
        cached_height -= 2016
    assert cached_height >= -1, cached_height
    running_total = _CHAINWORK_CACHE[self.get_hash(cached_height)]
    # roll forward again, filling the cache chunk by chunk
    while cached_height < last_retarget:
        cached_height += 2016
        work_in_single_header = self.chainwork_of_header_at_height(cached_height)
        running_total += 2016 * work_in_single_header
        _CHAINWORK_CACHE[self.get_hash(cached_height)] = running_total
    cached_height += 2016
    work_in_single_header = self.chainwork_of_header_at_height(cached_height)
    work_in_last_partial_chunk = (height % 2016 + 1) * work_in_single_header
    return running_total + work_in_last_partial_chunk
|
||||
|
||||
def can_connect(self, header: dict, check_height: bool = True) -> bool:
    """Return True if `header` extends this chain's tip and verifies."""
    if header is None:
        return False
    height = header['block_height']
    if check_height and self.height() != height - 1:
        #self.print_error("cannot connect at height", height)
        return False
    if height == 0:
        return hash_header(header) == constants.net.GENESIS
    try:
        prev_hash = self.get_hash(height - 1)
    except Exception:
        # was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit
        return False
    if prev_hash != header.get('prev_block_hash'):
        return False
    try:
        target = self.get_target(height - 1)
    except MissingHeader:
        return False
    try:
        self.verify_header(header, prev_hash, target)
    except Exception:
        # was `except BaseException as e` (unused binding, over-broad catch)
        return False
    return True
|
||||
|
||||
def connect_chunk(self, idx: int, hexdata: str) -> bool:
    """Verify and persist a chunk of headers; return whether it was accepted."""
    assert idx >= 0, idx
    try:
        data = bfh(hexdata)
        self.verify_chunk(idx, data)
        #self.print_error("validated chunk %d" % idx)
        self.save_chunk(idx, data)
        return True
    except Exception as e:
        # was `except BaseException`, which also swallowed
        # KeyboardInterrupt/SystemExit; failures are logged, not raised
        self.print_error(f'verify_chunk idx {idx} failed: {repr(e)}')
        return False
|
||||
|
||||
def get_checkpoints(self):
    """Return [(hash, target), ...], one entry per full 2016-header chunk."""
    # for each chunk, store the hash of the last block and the target after the chunk
    n_chunks = self.height() // 2016
    return [(self.get_hash((idx + 1) * 2016 - 1), self.get_target(idx))
            for idx in range(n_chunks)]
|
||||
|
||||
def AveragingInterval(self, height):
    """Number of blocks averaged when retargeting, per difficulty era."""
    if height < constants.net.nHeight_Difficulty_Version2:
        return constants.net.nAveragingInterval_Version1  # V1
    if height < constants.net.nHeight_Difficulty_Version3:
        return constants.net.nAveragingInterval_Version2  # V2
    return constants.net.nAveragingInterval_Version3      # V3
|
||||
|
||||
def MinActualTimespan(self, height):
    """Lower clamp on the measured timespan (limits upward difficulty moves)."""
    span = self.AveragingInterval(height) * constants.net.nPowTargetSpacing
    if height < constants.net.nHeight_Difficulty_Version2:
        adjust_up = constants.net.nMaxAdjustUp_Version1  # V1
    elif height < constants.net.nHeight_Difficulty_Version3:
        adjust_up = constants.net.nMaxAdjustUp_Version2  # V2
    else:
        adjust_up = constants.net.nMaxAdjustUp_Version3  # V3
    return int(span * (100 - adjust_up) / 100)
|
||||
|
||||
def MaxActualTimespan(self, height):
    """Upper clamp on the measured timespan (limits downward difficulty moves)."""
    span = self.AveragingInterval(height) * constants.net.nPowTargetSpacing
    if height < constants.net.nHeight_Difficulty_Version2:
        adjust_down = constants.net.nMaxAdjustDown_Version1  # V1
    elif height < constants.net.nHeight_Difficulty_Version3:
        adjust_down = constants.net.nMaxAdjustDown_Version2  # V2
    else:
        adjust_down = constants.net.nMaxAdjustDown_Version3  # V3
    return int(span * (100 + adjust_down) / 100)
|
||||
|
||||
def TargetTimespan(self, height):
    """Desired total timespan of one averaging window, per difficulty era."""
    if height < constants.net.nHeight_Difficulty_Version2:
        return constants.net.nTargetTimespan_Version1  # V1
    if height < constants.net.nHeight_Difficulty_Version3:
        return constants.net.nAveragingInterval_Version2 * constants.net.nPowTargetSpacing  # V2
    return constants.net.nAveragingInterval_Version3 * constants.net.nPowTargetSpacing      # V3
|
||||
|
||||
def DifficultyAdjustmentInterval(self, height):
    """Number of blocks between retargets at `height`, per difficulty era."""
    if height < constants.net.nHeight_Difficulty_Version2:
        return constants.net.nInterval_Version1  # V1
    if height < constants.net.nHeight_Difficulty_Version3:
        return constants.net.nInterval_Version2  # V2
    return constants.net.nInterval_Version3      # V3
|
||||
|
||||
|
||||
def check_header(header: dict) -> Optional[Blockchain]:
    """Return the Blockchain that already contains `header`, or None."""
    if type(header) is not dict:  # exact type check (not isinstance), as before
        return None
    with blockchains_lock:
        chains = list(blockchains.values())
    for chain in chains:
        if chain.check_header(header):
            return chain
    return None
|
||||
|
||||
|
||||
def can_connect(header: dict) -> Optional[Blockchain]:
    """Return the first Blockchain that `header` can extend, or None."""
    with blockchains_lock:
        chains = list(blockchains.values())
    for chain in chains:
        if chain.can_connect(header):
            return chain
    return None
|
||||
@ -1,3 +0,0 @@
|
||||
[
|
||||
|
||||
]
|
||||
@ -1,3 +0,0 @@
|
||||
[
|
||||
|
||||
]
|
||||
@ -1,185 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2018 The Electrum developers
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import os
|
||||
import json
|
||||
|
||||
|
||||
def read_json(filename, default):
    """Load JSON from `filename` (relative to this module's directory).

    Returns `default` if the file is missing, unreadable, or not valid JSON.
    """
    path = os.path.join(os.path.dirname(__file__), filename)
    try:
        with open(path, 'r') as f:
            r = json.load(f)
    except (OSError, ValueError):
        # missing/unreadable file or malformed JSON -> fall back to default.
        # (Was a bare `except:`, which also hid KeyboardInterrupt and typos.)
        r = default
    return r
|
||||
|
||||
|
||||
class AbstractNet:
    """Base class for per-network parameter bundles."""

    @classmethod
    def max_checkpoint(cls) -> int:
        """Height of the last checkpointed block (0 if there are no checkpoints)."""
        num_cp = len(cls.CHECKPOINTS)
        return max(0, num_cp * 2016 - 1)
|
||||
|
||||
|
||||
class BitcoinMainnet(AbstractNet):
    """FLO mainnet parameters."""

    TESTNET = False
    WIF_PREFIX = 0xa3
    ADDRTYPE_P2PKH = 35
    ADDRTYPE_P2SH = 94
    SEGWIT_HRP = "flo"
    GENESIS = "09c7781c9df90708e278c35d38ea5c9041d7ecfcdd1c56ba67274b7cff3e1cea"
    DEFAULT_PORTS = {'t': '50001', 's': '50002'}
    DEFAULT_SERVERS = read_json('servers.json', {})
    CHECKPOINTS = read_json('checkpoints.json', [])

    XPRV_HEADERS = {
        'standard': 0x01343c31,     # xprv
        'p2wpkh-p2sh': 0x049d7878,  # yprv
        'p2wsh-p2sh': 0x0295b005,   # Yprv
        'p2wpkh': 0x04b2430c,       # zprv
        'p2wsh': 0x02aa7a99,        # Zprv
    }
    XPUB_HEADERS = {
        'standard': 0x0134406b,     # xpub
        'p2wpkh-p2sh': 0x049d7cb2,  # ypub
        'p2wsh-p2sh': 0x0295b43f,   # Ypub
        'p2wpkh': 0x04b24746,       # zpub
        'p2wsh': 0x02aa7ed3,        # Zpub
    }
    BIP44_COIN_TYPE = 216

    # FLO Network constants
    fPowAllowMinDifficultyBlocks = False
    fPowNoRetargeting = False
    nRuleChangeActivationThreshold = 6048  # 75% of 8064
    nMinerConfirmationWindow = 8064

    # Difficulty adjustments
    nPowTargetSpacing = 40  # 40s block time
    # V1
    nTargetTimespan_Version1 = 60 * 60
    # BUGFIX: was true division (`/`), which made this block-count constant
    # a float (90.0) and leaked floats into the retargeting arithmetic.
    nInterval_Version1 = nTargetTimespan_Version1 // nPowTargetSpacing
    nMaxAdjustUp_Version1 = 75
    nMaxAdjustDown_Version1 = 300
    nAveragingInterval_Version1 = nInterval_Version1
    # V2
    nHeight_Difficulty_Version2 = 208440
    nInterval_Version2 = 15
    nMaxAdjustDown_Version2 = 300
    nMaxAdjustUp_Version2 = 75
    nAveragingInterval_Version2 = nInterval_Version2
    # V3
    nHeight_Difficulty_Version3 = 426000
    nInterval_Version3 = 1
    nMaxAdjustDown_Version3 = 3
    nMaxAdjustUp_Version3 = 2
    nAveragingInterval_Version3 = 6
|
||||
|
||||
|
||||
class BitcoinTestnet(AbstractNet):
    """FLO testnet parameters."""

    TESTNET = True
    WIF_PREFIX = 0xef
    ADDRTYPE_P2PKH = 115
    ADDRTYPE_P2SH = 58
    SEGWIT_HRP = "tflo"
    GENESIS = "9b7bc86236c34b5e3a39367c036b7fe8807a966c22a7a1f0da2a198a27e03731"
    DEFAULT_PORTS = {'t': '51001', 's': '51002'}
    DEFAULT_SERVERS = read_json('servers_testnet.json', {})
    CHECKPOINTS = read_json('checkpoints_testnet.json', [])

    XPRV_HEADERS = {
        'standard': 0x01343c23,     # tprv
        'p2wpkh-p2sh': 0x044a4e28,  # uprv
        'p2wsh-p2sh': 0x024285b5,   # Uprv
        'p2wpkh': 0x045f18bc,       # vprv
        'p2wsh': 0x02575048,        # Vprv
    }
    XPUB_HEADERS = {
        'standard': 0x013440e2,     # tpub
        'p2wpkh-p2sh': 0x044a5262,  # upub
        'p2wsh-p2sh': 0x024289ef,   # Upub
        'p2wpkh': 0x045f1cf6,       # vpub
        'p2wsh': 0x02575483,        # Vpub
    }
    BIP44_COIN_TYPE = 1

    # Difficulty adjustments
    nPowTargetSpacing = 40  # 40s block time
    # V1
    nTargetTimespan_Version1 = 60 * 60
    # BUGFIX: was true division (`/`) with a stray trailing semicolon; floor
    # division keeps this block-count constant an int (90), not a float.
    nInterval_Version1 = nTargetTimespan_Version1 // nPowTargetSpacing
    nMaxAdjustUp_Version1 = 75
    nMaxAdjustDown_Version1 = 300
    nAveragingInterval_Version1 = nInterval_Version1
    # V2
    nHeight_Difficulty_Version2 = 50000
    nInterval_Version2 = 15
    nMaxAdjustDown_Version2 = 300
    nMaxAdjustUp_Version2 = 75
    nAveragingInterval_Version2 = nInterval_Version2
    # V3
    nHeight_Difficulty_Version3 = 60000
    nInterval_Version3 = 1
    nMaxAdjustDown_Version3 = 3
    nMaxAdjustUp_Version3 = 2
    nAveragingInterval_Version3 = 6
|
||||
|
||||
|
||||
class BitcoinRegtest(BitcoinTestnet):
    """Regtest parameters: testnet defaults with their own genesis/servers."""

    SEGWIT_HRP = "bcrt"
    GENESIS = "0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206"
    DEFAULT_SERVERS = read_json('servers_regtest.json', {})
    CHECKPOINTS = []
|
||||
|
||||
|
||||
class BitcoinSimnet(BitcoinTestnet):
    """Simnet parameters: testnet defaults with their own genesis/servers."""

    SEGWIT_HRP = "sb"
    GENESIS = "683e86bd5c6d110d91b94b97137ba6bfe02dbbdb8e3dff722a669b5d69d77af6"
    DEFAULT_SERVERS = read_json('servers_regtest.json', {})
    CHECKPOINTS = []
|
||||
|
||||
|
||||
# don't import net directly, import the module instead (so that net is singleton)
net = BitcoinMainnet


def set_simnet():
    """Switch the module-level singleton to simnet parameters."""
    global net
    net = BitcoinSimnet


def set_mainnet():
    """Switch the module-level singleton to mainnet parameters."""
    global net
    net = BitcoinMainnet


def set_testnet():
    """Switch the module-level singleton to testnet parameters."""
    global net
    net = BitcoinTestnet


def set_regtest():
    """Switch the module-level singleton to regtest parameters."""
    global net
    net = BitcoinRegtest
|
||||
@ -1,216 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2018 The Electrum developers
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import base64
|
||||
import os
|
||||
import hashlib
|
||||
import hmac
|
||||
from typing import Union
|
||||
|
||||
import pyaes
|
||||
|
||||
from .util import assert_bytes, InvalidPassword, to_bytes, to_string, WalletFileException
|
||||
from .i18n import _
|
||||
|
||||
|
||||
try:
|
||||
from Cryptodome.Cipher import AES
|
||||
except:
|
||||
AES = None
|
||||
|
||||
|
||||
class InvalidPadding(Exception):
    """Raised when the PKCS#7 padding of a decrypted blob is malformed."""
|
||||
|
||||
|
||||
def append_PKCS7_padding(data: bytes) -> bytes:
    """Pad `data` to a multiple of 16 bytes per PKCS#7 (always adds >= 1 byte)."""
    assert_bytes(data)
    pad_len = 16 - (len(data) % 16)
    return data + bytes([pad_len]) * pad_len
|
||||
|
||||
|
||||
def strip_PKCS7_padding(data: bytes) -> bytes:
    """Remove and validate PKCS#7 padding; raise InvalidPadding if malformed."""
    assert_bytes(data)
    if len(data) % 16 != 0 or len(data) == 0:
        raise InvalidPadding("invalid length")
    pad_len = data[-1]
    if not (0 < pad_len <= 16):
        raise InvalidPadding("invalid padding byte (out of range)")
    if any(b != pad_len for b in data[-pad_len:]):
        raise InvalidPadding("invalid padding byte (inconsistent)")
    return data[0:-pad_len]
|
||||
|
||||
|
||||
def aes_encrypt_with_iv(key: bytes, iv: bytes, data: bytes) -> bytes:
    """AES-CBC encrypt `data` (PKCS#7-padded first) with `key`/`iv`.

    Uses the Cryptodome AES binding when importable, else the pure-python
    pyaes fallback.
    """
    assert_bytes(key, iv, data)
    padded = append_PKCS7_padding(data)
    if AES:
        return AES.new(key, AES.MODE_CBC, iv).encrypt(padded)
    aes_cbc = pyaes.AESModeOfOperationCBC(key, iv=iv)
    encrypter = pyaes.Encrypter(aes_cbc, padding=pyaes.PADDING_NONE)
    return encrypter.feed(padded) + encrypter.feed()  # empty feed() flushes buffer
|
||||
|
||||
|
||||
def aes_decrypt_with_iv(key: bytes, iv: bytes, data: bytes) -> bytes:
    """AES-CBC decrypt `data` and strip padding; raises InvalidPassword on bad padding."""
    assert_bytes(key, iv, data)
    if AES:
        plaintext = AES.new(key, AES.MODE_CBC, iv).decrypt(data)
    else:
        aes_cbc = pyaes.AESModeOfOperationCBC(key, iv=iv)
        decrypter = pyaes.Decrypter(aes_cbc, padding=pyaes.PADDING_NONE)
        plaintext = decrypter.feed(data) + decrypter.feed()  # empty feed() flushes buffer
    try:
        return strip_PKCS7_padding(plaintext)
    except InvalidPadding:
        # bad padding after decryption is treated as a wrong password
        raise InvalidPassword()
|
||||
|
||||
|
||||
def EncodeAES_base64(secret: bytes, msg: bytes) -> bytes:
    """Returns base64 encoded ciphertext."""
    return base64.b64encode(EncodeAES_bytes(secret, msg))


def EncodeAES_bytes(secret: bytes, msg: bytes) -> bytes:
    """Encrypt `msg` under `secret` with a fresh random IV; returns iv || ciphertext."""
    assert_bytes(msg)
    iv = bytes(os.urandom(16))
    return iv + aes_encrypt_with_iv(secret, iv, msg)
|
||||
|
||||
|
||||
def DecodeAES_base64(secret: bytes, ciphertext_b64: Union[bytes, str]) -> bytes:
    """Decrypt base64-encoded ciphertext produced by EncodeAES_base64."""
    ciphertext = bytes(base64.b64decode(ciphertext_b64))
    return DecodeAES_bytes(secret, ciphertext)


def DecodeAES_bytes(secret: bytes, ciphertext: bytes) -> bytes:
    """Decrypt iv || ciphertext as produced by EncodeAES_bytes."""
    assert_bytes(ciphertext)
    iv, payload = ciphertext[:16], ciphertext[16:]
    return aes_decrypt_with_iv(secret, iv, payload)
|
||||
|
||||
|
||||
# Password-hashing scheme versions.
PW_HASH_VERSION_LATEST = 1
KNOWN_PW_HASH_VERSIONS = (1, 2, )    # versions any build knows about
SUPPORTED_PW_HASH_VERSIONS = (1, )   # versions this build can actually derive
assert PW_HASH_VERSION_LATEST in KNOWN_PW_HASH_VERSIONS
assert PW_HASH_VERSION_LATEST in SUPPORTED_PW_HASH_VERSIONS
|
||||
|
||||
|
||||
class UnexpectedPasswordHashVersion(InvalidPassword, WalletFileException):
    """A pw-hash version outside KNOWN_PW_HASH_VERSIONS was requested."""

    def __init__(self, version):
        self.version = version

    def __str__(self):
        return "{unexpected}: {version}\n{instruction}".format(
            unexpected=_("Unexpected password hash version"),
            version=self.version,
            instruction=_('You are most likely using an outdated version of Electrum. Please update.'))


class UnsupportedPasswordHashVersion(InvalidPassword, WalletFileException):
    """A known pw-hash version that this build cannot derive was requested."""

    def __init__(self, version):
        self.version = version

    def __str__(self):
        return "{unsupported}: {version}\n{instruction}".format(
            unsupported=_("Unsupported password hash version"),
            version=self.version,
            instruction=f"To open this wallet, try 'git checkout password_v{self.version}'.\n"
                        "Alternatively, restore from seed.")
|
||||
|
||||
|
||||
def _hash_password(password: Union[bytes, str], *, version: int) -> bytes:
    """Derive a fixed-length key from `password` per the given scheme version."""
    pw = to_bytes(password, 'utf8')
    if version not in SUPPORTED_PW_HASH_VERSIONS:
        raise UnsupportedPasswordHashVersion(version)
    if version == 1:
        return sha256d(pw)
    # a version that is supported but unhandled above is a programming error:
    assert version not in KNOWN_PW_HASH_VERSIONS
    raise UnexpectedPasswordHashVersion(version)
|
||||
|
||||
|
||||
def pw_encode(data: str, password: Union[bytes, str, None], *, version: int) -> str:
    """Encrypt `data` with `password`; returns base64 text.

    A falsy password returns `data` unchanged.
    NOTE(review): pw_decode only short-circuits on `password is None`, so an
    empty-string password is handled asymmetrically -- confirm intended.
    """
    if not password:
        return data
    if version not in KNOWN_PW_HASH_VERSIONS:
        raise UnexpectedPasswordHashVersion(version)
    secret = _hash_password(password, version=version)  # derive key from password
    ciphertext = EncodeAES_bytes(secret, to_bytes(data, "utf8"))
    return base64.b64encode(ciphertext).decode('utf8')
|
||||
|
||||
|
||||
def pw_decode(data: str, password: Union[bytes, str, None], *, version: int) -> str:
    """Decrypt base64 `data` with `password`; raises InvalidPassword on failure."""
    if password is None:
        return data
    if version not in KNOWN_PW_HASH_VERSIONS:
        raise UnexpectedPasswordHashVersion(version)
    data_bytes = bytes(base64.b64decode(data))
    secret = _hash_password(password, version=version)  # derive key from password
    try:
        return to_string(DecodeAES_bytes(secret, data_bytes), "utf8")
    except Exception as e:
        raise InvalidPassword() from e
|
||||
|
||||
|
||||
def sha256(x: Union[bytes, str]) -> bytes:
    """Single SHA-256; str input is UTF-8 encoded first."""
    return bytes(hashlib.sha256(to_bytes(x, 'utf8')).digest())
|
||||
|
||||
|
||||
def sha256d(x: Union[bytes, str]) -> bytes:
    """Double SHA-256: sha256 applied twice."""
    return bytes(sha256(sha256(to_bytes(x, 'utf8'))))
|
||||
|
||||
|
||||
def hash_160(x: bytes) -> bytes:
    """RIPEMD160(SHA256(x))."""
    try:
        md = hashlib.new('ripemd160')
    except Exception:
        # OpenSSL build without ripemd160: fall back to the bundled
        # pure-python implementation.  (Was `except BaseException`, which
        # also swallowed KeyboardInterrupt/SystemExit; the try block is now
        # narrowed to the hashlib lookup that can actually fail this way.)
        from . import ripemd
        md = ripemd.new(sha256(x))
        return md.digest()
    md.update(sha256(x))
    return md.digest()
|
||||
|
||||
|
||||
def hmac_oneshot(key: bytes, msg: bytes, digest) -> bytes:
    """One-shot HMAC of `msg` under `key` with the given digest."""
    if hasattr(hmac, 'digest'):
        # requires python 3.7+; faster
        return hmac.digest(key, msg, digest)
    return hmac.new(key, msg, digest).digest()
|
||||
@ -1,44 +0,0 @@
|
||||
{
|
||||
"CoinMarketcap": [
|
||||
"AED",
|
||||
"ALL",
|
||||
"ARS",
|
||||
"AUD",
|
||||
"BHD",
|
||||
"BOB",
|
||||
"BRL",
|
||||
"KHR",
|
||||
"CAD",
|
||||
"CLP",
|
||||
"CNY",
|
||||
"COP",
|
||||
"CUP",
|
||||
"CZK",
|
||||
"EGP",
|
||||
"EUR",
|
||||
"HKD",
|
||||
"HUF",
|
||||
"ISK",
|
||||
"INR",
|
||||
"IDR",
|
||||
"IQD",
|
||||
"ILS",
|
||||
"JPY",
|
||||
"LBP",
|
||||
"MYR",
|
||||
"MXN",
|
||||
"NPR",
|
||||
"NZD",
|
||||
"NGN",
|
||||
"NOK",
|
||||
"GBP",
|
||||
"QAR",
|
||||
"RUB",
|
||||
"SGD",
|
||||
"SEK",
|
||||
"CHF",
|
||||
"THB",
|
||||
"USD",
|
||||
"VND"
|
||||
]
|
||||
}
|
||||
442
electrum/ecc.py
@ -1,442 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2018 The Electrum developers
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
from typing import Union, Tuple
|
||||
|
||||
import ecdsa
|
||||
from ecdsa.ecdsa import curve_secp256k1, generator_secp256k1
|
||||
from ecdsa.curves import SECP256k1
|
||||
from ecdsa.ellipticcurve import Point
|
||||
from ecdsa.util import string_to_number, number_to_string
|
||||
|
||||
from .util import bfh, bh2u, assert_bytes, print_error, to_bytes, InvalidPassword, profiler
|
||||
from .crypto import (sha256d, aes_encrypt_with_iv, aes_decrypt_with_iv, hmac_oneshot)
|
||||
from .ecc_fast import do_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1
|
||||
from . import msqr
|
||||
from . import constants
|
||||
|
||||
|
||||
do_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1()
|
||||
|
||||
CURVE_ORDER = SECP256k1.order
|
||||
|
||||
|
||||
def generator():
    """The secp256k1 generator point G, wrapped as an ECPubkey."""
    return ECPubkey.from_point(generator_secp256k1)


def point_at_infinity():
    """The group identity element, wrapped as an ECPubkey."""
    return ECPubkey(None)
|
||||
|
||||
|
||||
def sig_string_from_der_sig(der_sig: bytes, order=CURVE_ORDER) -> bytes:
    """DER signature -> fixed-size 64-byte r||s string."""
    r, s = ecdsa.util.sigdecode_der(der_sig, order)
    return ecdsa.util.sigencode_string(r, s, order)


def der_sig_from_sig_string(sig_string: bytes, order=CURVE_ORDER) -> bytes:
    """64-byte r||s string -> canonicalized DER signature."""
    r, s = ecdsa.util.sigdecode_string(sig_string, order)
    return ecdsa.util.sigencode_der_canonize(r, s, order)


def der_sig_from_r_and_s(r: int, s: int, order=CURVE_ORDER) -> bytes:
    """(r, s) integers -> canonicalized DER signature."""
    return ecdsa.util.sigencode_der_canonize(r, s, order)


def get_r_and_s_from_der_sig(der_sig: bytes, order=CURVE_ORDER) -> Tuple[int, int]:
    """DER signature -> (r, s) integers."""
    return ecdsa.util.sigdecode_der(der_sig, order)


def get_r_and_s_from_sig_string(sig_string: bytes, order=CURVE_ORDER) -> Tuple[int, int]:
    """64-byte r||s string -> (r, s) integers."""
    return ecdsa.util.sigdecode_string(sig_string, order)


def sig_string_from_r_and_s(r: int, s: int, order=CURVE_ORDER) -> bytes:
    """(r, s) integers -> canonicalized 64-byte r||s string."""
    return ecdsa.util.sigencode_string_canonize(r, s, order)
|
||||
|
||||
|
||||
def point_to_ser(P, compressed=True) -> bytes:
    """Serialize a point (an (x, y) tuple or an ecdsa Point) to SEC bytes.

    Returns None for the point at infinity.
    """
    if isinstance(P, tuple):
        assert len(P) == 2, 'unexpected point: %s' % P
        x, y = P
    else:
        x, y = P.x(), P.y()
    if x is None or y is None:  # infinity
        return None
    if compressed:
        # prefix 0x02/0x03 encodes the parity of y
        return bfh(('%02x' % (2+(y&1))) + ('%064x' % x))
    return bfh('04'+('%064x' % x)+('%064x' % y))
|
||||
|
||||
|
||||
def get_y_coord_from_x(x: int, odd: bool=True) -> int:
    """Solve y^2 = x^3 + a*x + b (mod p) for y with the requested parity.

    Raises InvalidECPointException when `x` is not on the curve.
    """
    curve = curve_secp256k1
    _p = curve.p()
    x = x % _p
    y2 = (pow(x, 3, _p) + curve.a() * x + curve.b()) % _p
    y = msqr.modular_sqrt(y2, _p)
    if curve.contains_point(x, y):
        if odd == bool(y & 1):
            return y
        return _p - y  # the other root has the opposite parity
    raise InvalidECPointException()
|
||||
|
||||
|
||||
def ser_to_point(ser: bytes) -> Tuple[int, int]:
    """Parse a SEC-serialized point into (x, y) integers."""
    prefix = ser[0]
    if prefix not in (0x02, 0x03, 0x04):
        raise ValueError('Unexpected first byte: {}'.format(prefix))
    if prefix == 0x04:  # uncompressed: x || y
        return string_to_number(ser[1:33]), string_to_number(ser[33:])
    # compressed: recover y from x and the parity encoded in the prefix
    x = string_to_number(ser[1:])
    return x, get_y_coord_from_x(x, prefix == 0x03)
|
||||
|
||||
|
||||
def _ser_to_python_ecdsa_point(ser: bytes) -> ecdsa.ellipticcurve.Point:
    """Deserialize and validate a point; raise InvalidECPointException if bad."""
    x, y = ser_to_point(ser)
    try:
        return Point(curve_secp256k1, x, y, CURVE_ORDER)
    except:
        raise InvalidECPointException()
|
||||
|
||||
|
||||
class InvalidECPointException(Exception):
    """Raised for invalid EC points: e.g. not on curve, or infinity."""
|
||||
|
||||
|
||||
class _MyVerifyingKey(ecdsa.VerifyingKey):
    @classmethod
    def from_signature(klass, sig, recid, h, curve):  # TODO use libsecp??
        """ See http://www.secg.org/download/aid-780/sec1-v2.pdf, chapter 4.1.6 """
        from ecdsa import util, numbertheory
        from . import msqr
        curveFp = curve.curve
        G = curve.generator
        order = G.order()
        # extract r,s from signature
        r, s = util.sigdecode_string(sig, order)
        # 1.1 candidate x coordinate of the ephemeral point R
        x = r + (recid // 2) * order
        # 1.3 recover y from the curve equation
        alpha = (x * x * x + curveFp.a() * x + curveFp.b()) % curveFp.p()
        beta = msqr.modular_sqrt(alpha, curveFp.p())
        y = beta if (beta - recid) % 2 == 0 else curveFp.p() - beta
        # 1.4 the constructor checks that nR is at infinity
        try:
            R = Point(curveFp, x, y, order)
        except:
            raise InvalidECPointException()
        # 1.5 compute e from message:
        e = string_to_number(h)
        minus_e = -e % order
        # 1.6 compute Q = r^-1 (sR - eG)
        inv_r = numbertheory.inverse_mod(r, order)
        try:
            Q = inv_r * (s * R + minus_e * G)
        except:
            raise InvalidECPointException()
        return klass.from_public_point(Q, curve)
|
||||
|
||||
|
||||
class _MySigningKey(ecdsa.SigningKey):
    """Enforce low S values in signatures (non-malleable, BIP-62 style)."""

    def sign_number(self, number, entropy=None, k=None):
        # Delegate to python-ecdsa, then canonicalize s to the lower of
        # {s, order - s}.  CURVE_ORDER is odd, so the two are never equal.
        r, s = ecdsa.SigningKey.sign_number(self, number, entropy, k)
        return r, min(s, CURVE_ORDER - s)
|
||||
|
||||
|
||||
class _PubkeyForPointAtInfinity:
    """Stand-in used by ECPubkey when the key is the point at infinity
    (ecdsa.ecdsa.Public_key cannot represent it)."""
    # mirrors the `.point` attribute of ecdsa.ecdsa.Public_key
    point = ecdsa.ellipticcurve.INFINITY
|
||||
|
||||
|
||||
class ECPubkey(object):
    """A secp256k1 public key wrapping a python-ecdsa Public_key.

    Constructed from SEC-serialized bytes (compressed or uncompressed),
    or with b=None to represent the point at infinity.
    """

    def __init__(self, b: bytes):
        # b: SEC-encoded public key bytes, or None for the point at infinity
        if b is not None:
            assert_bytes(b)
            point = _ser_to_python_ecdsa_point(b)
            self._pubkey = ecdsa.ecdsa.Public_key(generator_secp256k1, point)
        else:
            self._pubkey = _PubkeyForPointAtInfinity()

    @classmethod
    def from_sig_string(cls, sig_string: bytes, recid: int, msg_hash: bytes):
        """Recover the public key from a 64-byte compact signature, a
        recovery id in [0, 3], and the 32-byte message hash."""
        assert_bytes(sig_string)
        if len(sig_string) != 64:
            raise Exception('Wrong encoding')
        if recid < 0 or recid > 3:
            raise ValueError('recid is {}, but should be 0 <= recid <= 3'.format(recid))
        ecdsa_verifying_key = _MyVerifyingKey.from_signature(sig_string, recid, msg_hash, curve=SECP256k1)
        ecdsa_point = ecdsa_verifying_key.pubkey.point
        return ECPubkey.from_point(ecdsa_point)

    @classmethod
    def from_signature65(cls, sig: bytes, msg_hash: bytes):
        """Recover (pubkey, compressed) from a 65-byte message signature.

        The header byte encodes both the recovery id and compression:
        27..30 -> uncompressed, 31..34 -> compressed.
        """
        if len(sig) != 65:
            raise Exception("Wrong encoding")
        nV = sig[0]
        if nV < 27 or nV >= 35:
            raise Exception("Bad encoding")
        if nV >= 31:
            compressed = True
            nV -= 4
        else:
            compressed = False
        recid = nV - 27
        return cls.from_sig_string(sig[1:], recid, msg_hash), compressed

    @classmethod
    def from_point(cls, point):
        """Build an ECPubkey from a python-ecdsa point object."""
        # uncompressed round-trip: faster than compressed (no y recovery)
        _bytes = point_to_ser(point, compressed=False)
        return ECPubkey(_bytes)

    def get_public_key_bytes(self, compressed=True):
        """SEC-serialize this key (33 bytes compressed / 65 uncompressed).
        Raises if the key is the point at infinity."""
        if self.is_at_infinity(): raise Exception('point is at infinity')
        return point_to_ser(self.point(), compressed)

    def get_public_key_hex(self, compressed=True):
        """Hex-string form of get_public_key_bytes()."""
        return bh2u(self.get_public_key_bytes(compressed))

    def point(self) -> Tuple[int, int]:
        """Affine (x, y) coordinates of this key's curve point."""
        return self._pubkey.point.x(), self._pubkey.point.y()

    def __mul__(self, other: int):
        # scalar multiplication: ECPubkey * int -> ECPubkey
        if not isinstance(other, int):
            raise TypeError('multiplication not defined for ECPubkey and {}'.format(type(other)))
        ecdsa_point = self._pubkey.point * other
        return self.from_point(ecdsa_point)

    def __rmul__(self, other: int):
        return self * other

    def __add__(self, other):
        # EC point addition of two public keys
        if not isinstance(other, ECPubkey):
            raise TypeError('addition not defined for ECPubkey and {}'.format(type(other)))
        ecdsa_point = self._pubkey.point + other._pubkey.point
        return self.from_point(ecdsa_point)

    def __eq__(self, other):
        # equality is by point coordinates
        return self._pubkey.point.x() == other._pubkey.point.x() \
            and self._pubkey.point.y() == other._pubkey.point.y()

    def __ne__(self, other):
        return not (self == other)

    def verify_message_for_address(self, sig65: bytes, message: bytes) -> None:
        """Verify a 65-byte signed `message` against this key.
        Raises on failure; returns None on success."""
        assert_bytes(message)
        h = sha256d(msg_magic(message))
        public_key, compressed = self.from_signature65(sig65, h)
        # check the recovered public key matches ours
        if public_key != self:
            raise Exception("Bad signature")
        # check the signature itself over the message hash
        self.verify_message_hash(sig65[1:], h)

    def verify_message_hash(self, sig_string: bytes, msg_hash: bytes) -> None:
        """Verify a 64-byte compact signature over `msg_hash`.
        Raises on failure (via python-ecdsa); returns None on success."""
        assert_bytes(sig_string)
        if len(sig_string) != 64:
            raise Exception('Wrong encoding')
        ecdsa_point = self._pubkey.point
        verifying_key = _MyVerifyingKey.from_public_point(ecdsa_point, curve=SECP256k1)
        verifying_key.verify_digest(sig_string, msg_hash, sigdecode=ecdsa.util.sigdecode_string)

    def encrypt_message(self, message: bytes, magic: bytes = b'BIE1'):
        """
        ECIES encryption/decryption methods; AES-128-CBC with PKCS7 is used as the cipher; hmac-sha256 is used as the mac
        """
        assert_bytes(message)

        # fresh ephemeral keypair for the ECDH exchange
        randint = ecdsa.util.randrange(CURVE_ORDER)
        ephemeral_exponent = number_to_string(randint, CURVE_ORDER)
        ephemeral = ECPrivkey(ephemeral_exponent)
        # shared secret = SHA512(compressed ECDH point), split into
        # IV (16) | AES key (16) | MAC key (32)
        ecdh_key = (self * ephemeral.secret_scalar).get_public_key_bytes(compressed=True)
        key = hashlib.sha512(ecdh_key).digest()
        iv, key_e, key_m = key[0:16], key[16:32], key[32:]
        ciphertext = aes_encrypt_with_iv(key_e, iv, message)
        ephemeral_pubkey = ephemeral.get_public_key_bytes(compressed=True)
        encrypted = magic + ephemeral_pubkey + ciphertext
        # MAC covers everything before it (magic | ephemeral pubkey | ciphertext)
        mac = hmac_oneshot(key_m, encrypted, hashlib.sha256)

        return base64.b64encode(encrypted + mac)

    @classmethod
    def order(cls):
        """The secp256k1 group order n."""
        return CURVE_ORDER

    def is_at_infinity(self):
        """True iff this key is the point at infinity."""
        return self == point_at_infinity()

    @classmethod
    def is_pubkey_bytes(cls, b: bytes):
        """True iff `b` parses as a valid SEC-encoded public key."""
        try:
            ECPubkey(b)
            return True
        except:
            return False
|
||||
|
||||
|
||||
def msg_magic(message: bytes) -> bytes:
    """Prefix `message` with the Bitcoin signed-message magic
    (0x18 'Bitcoin Signed Message:\\n') and the varint-encoded message
    length; this is what gets double-SHA256'd for message signatures."""
    from .bitcoin import var_int
    length = bfh(var_int(len(message)))
    return b"\x18Bitcoin Signed Message:\n" + length + message
|
||||
|
||||
|
||||
def verify_message_with_address(address: str, sig65: bytes, message: bytes, *, net=None):
    """Verify a 65-byte signed `message` against a Bitcoin `address`.

    The public key is recovered from the signature and matched against the
    address under each of the p2pkh / p2wpkh / p2wpkh-p2sh encodings.
    Returns True on success; logs and returns False on any failure.
    """
    from .bitcoin import pubkey_to_address
    assert_bytes(sig65, message)
    if net is None: net = constants.net
    try:
        h = sha256d(msg_magic(message))
        public_key, compressed = ECPubkey.from_signature65(sig65, h)
        # check public key using the address
        pubkey_hex = public_key.get_public_key_hex(compressed)
        for txin_type in ['p2pkh','p2wpkh','p2wpkh-p2sh']:
            addr = pubkey_to_address(txin_type, pubkey_hex, net=net)
            if address == addr:
                break
        else:
            # no encoding of the recovered key matches the address
            raise Exception("Bad signature")
        # check message
        public_key.verify_message_hash(sig65[1:], h)
        return True
    except Exception as e:
        print_error(f"Verification error: {repr(e)}")
        return False
|
||||
|
||||
|
||||
def is_secret_within_curve_range(secret: Union[int, bytes]) -> bool:
    """Return True iff `secret` (an int, or big-endian bytes) is a valid
    secp256k1 scalar, i.e. lies in [1, CURVE_ORDER - 1]."""
    scalar = string_to_number(secret) if isinstance(secret, bytes) else secret
    return 0 < scalar < CURVE_ORDER
|
||||
|
||||
|
||||
class ECPrivkey(ECPubkey):
    """A secp256k1 private key; also acts as its public key via inheritance."""

    def __init__(self, privkey_bytes: bytes):
        # privkey_bytes: exactly 32 big-endian bytes, scalar in [1, n-1]
        assert_bytes(privkey_bytes)
        if len(privkey_bytes) != 32:
            raise Exception('unexpected size for secret. should be 32 bytes, not {}'.format(len(privkey_bytes)))
        secret = string_to_number(privkey_bytes)
        if not is_secret_within_curve_range(secret):
            raise InvalidECPointException('Invalid secret scalar (not within curve order)')
        self.secret_scalar = secret

        # derive and store the corresponding public key
        point = generator_secp256k1 * secret
        super().__init__(point_to_ser(point))
        self._privkey = ecdsa.ecdsa.Private_key(self._pubkey, secret)

    @classmethod
    def from_secret_scalar(cls, secret_scalar: int):
        """Build a key from an integer scalar."""
        secret_bytes = number_to_string(secret_scalar, CURVE_ORDER)
        return ECPrivkey(secret_bytes)

    @classmethod
    def from_arbitrary_size_secret(cls, privkey_bytes: bytes):
        """This method is only for legacy reasons. Do not introduce new code that uses it.
        Unlike the default constructor, this method does not require len(privkey_bytes) == 32,
        and the secret does not need to be within the curve order either.
        """
        return ECPrivkey(cls.normalize_secret_bytes(privkey_bytes))

    @classmethod
    def normalize_secret_bytes(cls, privkey_bytes: bytes) -> bytes:
        """Reduce an arbitrary-size secret mod n and left-pad to 32 bytes.
        Raises if the reduced scalar is zero."""
        scalar = string_to_number(privkey_bytes) % CURVE_ORDER
        if scalar == 0:
            raise Exception('invalid EC private key scalar: zero')
        privkey_32bytes = number_to_string(scalar, CURVE_ORDER)
        return privkey_32bytes

    def sign(self, data: bytes, sigencode=None, sigdecode=None) -> bytes:
        """Deterministically (RFC 6979 style, via python-ecdsa) sign the
        32-byte digest `data`; the signature is self-verified before return.

        sigencode/sigdecode default to the 64-byte compact string codecs.
        """
        if sigencode is None:
            sigencode = sig_string_from_r_and_s
        if sigdecode is None:
            sigdecode = get_r_and_s_from_sig_string
        private_key = _MySigningKey.from_secret_exponent(self.secret_scalar, curve=SECP256k1)
        sig = private_key.sign_digest_deterministic(data, hashfunc=hashlib.sha256, sigencode=sigencode)
        public_key = private_key.get_verifying_key()
        # sanity check: verify our own signature before handing it out
        if not public_key.verify_digest(sig, data, sigdecode=sigdecode):
            raise Exception('Sanity check verifying our own signature failed.')
        return sig

    def sign_transaction(self, hashed_preimage: bytes) -> bytes:
        """Sign a transaction sighash; returns a DER-encoded signature."""
        return self.sign(hashed_preimage,
                         sigencode=der_sig_from_r_and_s,
                         sigdecode=get_r_and_s_from_der_sig)

    def sign_message(self, message: bytes, is_compressed: bool) -> bytes:
        """Produce a 65-byte signed message (header byte + compact sig).

        The recovery id is found by brute force: each recid in 0..3 is
        tried until the recovered pubkey verifies against this key.
        """
        def bruteforce_recid(sig_string):
            for recid in range(4):
                sig65 = construct_sig65(sig_string, recid, is_compressed)
                try:
                    self.verify_message_for_address(sig65, message)
                    return sig65, recid
                except Exception as e:
                    continue
            else:
                raise Exception("error: cannot sign message. no recid fits..")

        message = to_bytes(message, 'utf8')
        msg_hash = sha256d(msg_magic(message))
        sig_string = self.sign(msg_hash,
                               sigencode=sig_string_from_r_and_s,
                               sigdecode=get_r_and_s_from_sig_string)
        sig65, recid = bruteforce_recid(sig_string)
        return sig65

    def decrypt_message(self, encrypted: Tuple[str, bytes], magic: bytes=b'BIE1') -> bytes:
        """ECIES decryption, counterpart of ECPubkey.encrypt_message.

        Payload layout after base64-decoding:
        magic(4) | ephemeral pubkey(33) | ciphertext | hmac-sha256(32).
        Raises InvalidPassword on MAC mismatch.
        """
        encrypted = base64.b64decode(encrypted)
        if len(encrypted) < 85:
            raise Exception('invalid ciphertext: length')
        magic_found = encrypted[:4]
        ephemeral_pubkey_bytes = encrypted[4:37]
        ciphertext = encrypted[37:-32]
        mac = encrypted[-32:]
        if magic_found != magic:
            raise Exception('invalid ciphertext: invalid magic bytes')
        try:
            ecdsa_point = _ser_to_python_ecdsa_point(ephemeral_pubkey_bytes)
        except AssertionError as e:
            raise Exception('invalid ciphertext: invalid ephemeral pubkey') from e
        if not ecdsa.ecdsa.point_is_valid(generator_secp256k1, ecdsa_point.x(), ecdsa_point.y()):
            raise Exception('invalid ciphertext: invalid ephemeral pubkey')
        ephemeral_pubkey = ECPubkey.from_point(ecdsa_point)
        # ECDH shared secret -> SHA512 -> IV (16) | AES key (16) | MAC key (32)
        ecdh_key = (ephemeral_pubkey * self.secret_scalar).get_public_key_bytes(compressed=True)
        key = hashlib.sha512(ecdh_key).digest()
        iv, key_e, key_m = key[0:16], key[16:32], key[32:]
        # authenticate before decrypting
        if mac != hmac_oneshot(key_m, encrypted[:-32], hashlib.sha256):
            raise InvalidPassword()
        return aes_decrypt_with_iv(key_e, iv, ciphertext)
|
||||
|
||||
|
||||
def construct_sig65(sig_string: bytes, recid: int, is_compressed: bool) -> bytes:
    """Prepend the 1-byte recovery header (27 + recid, plus 4 when the
    public key is compressed) to a 64-byte compact signature, yielding
    the 65-byte signed-message format."""
    header = 27 + recid + (4 if is_compressed else 0)
    return bytes([header]) + sig_string
|
||||
@ -1,223 +0,0 @@
|
||||
# taken (with minor modifications) from pycoin
|
||||
# https://github.com/richardkiss/pycoin/blob/01b1787ed902df23f99a55deb00d8cd076a906fe/pycoin/ecdsa/native/secp256k1.py
|
||||
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
import ctypes
|
||||
from ctypes.util import find_library
|
||||
from ctypes import (
|
||||
byref, c_byte, c_int, c_uint, c_char_p, c_size_t, c_void_p, create_string_buffer, CFUNCTYPE, POINTER
|
||||
)
|
||||
|
||||
import ecdsa
|
||||
|
||||
from .util import print_stderr, print_error
|
||||
|
||||
|
||||
# Flag constants mirroring secp256k1.h (values must match the C header).
SECP256K1_FLAGS_TYPE_MASK = ((1 << 8) - 1)
SECP256K1_FLAGS_TYPE_CONTEXT = (1 << 0)
SECP256K1_FLAGS_TYPE_COMPRESSION = (1 << 1)
# /** The higher bits contain the actual data. Do not use directly. */
SECP256K1_FLAGS_BIT_CONTEXT_VERIFY = (1 << 8)
SECP256K1_FLAGS_BIT_CONTEXT_SIGN = (1 << 9)
SECP256K1_FLAGS_BIT_COMPRESSION = (1 << 8)

# /** Flags to pass to secp256k1_context_create. */
SECP256K1_CONTEXT_VERIFY = (SECP256K1_FLAGS_TYPE_CONTEXT | SECP256K1_FLAGS_BIT_CONTEXT_VERIFY)
SECP256K1_CONTEXT_SIGN = (SECP256K1_FLAGS_TYPE_CONTEXT | SECP256K1_FLAGS_BIT_CONTEXT_SIGN)
SECP256K1_CONTEXT_NONE = (SECP256K1_FLAGS_TYPE_CONTEXT)

# Serialization flags for secp256k1_ec_pubkey_serialize.
SECP256K1_EC_COMPRESSED = (SECP256K1_FLAGS_TYPE_COMPRESSION | SECP256K1_FLAGS_BIT_COMPRESSION)
SECP256K1_EC_UNCOMPRESSED = (SECP256K1_FLAGS_TYPE_COMPRESSION)
|
||||
|
||||
|
||||
def load_library():
    """Load libsecp256k1 via ctypes, declare the argument/return types of the
    functions we use, then create and randomize a context.

    Returns the library handle (with `.ctx` attached) or None on any failure.
    """
    # pick the platform-specific shared-library name
    if sys.platform == 'darwin':
        library_path = 'libsecp256k1.0.dylib'
    elif sys.platform in ('windows', 'win32'):
        library_path = 'libsecp256k1.dll'
    elif 'ANDROID_DATA' in os.environ:
        library_path = 'libsecp256k1.so'
    else:
        library_path = 'libsecp256k1.so.0'

    secp256k1 = ctypes.cdll.LoadLibrary(library_path)
    if not secp256k1:
        print_stderr('[ecc] warning: libsecp256k1 library failed to load')
        return None

    try:
        # declare argtypes/restype for every function used below; getting
        # these wrong silently corrupts arguments on some platforms
        secp256k1.secp256k1_context_create.argtypes = [c_uint]
        secp256k1.secp256k1_context_create.restype = c_void_p

        secp256k1.secp256k1_context_randomize.argtypes = [c_void_p, c_char_p]
        secp256k1.secp256k1_context_randomize.restype = c_int

        secp256k1.secp256k1_ec_pubkey_create.argtypes = [c_void_p, c_void_p, c_char_p]
        secp256k1.secp256k1_ec_pubkey_create.restype = c_int

        secp256k1.secp256k1_ecdsa_sign.argtypes = [c_void_p, c_char_p, c_char_p, c_char_p, c_void_p, c_void_p]
        secp256k1.secp256k1_ecdsa_sign.restype = c_int

        secp256k1.secp256k1_ecdsa_verify.argtypes = [c_void_p, c_char_p, c_char_p, c_char_p]
        secp256k1.secp256k1_ecdsa_verify.restype = c_int

        secp256k1.secp256k1_ec_pubkey_parse.argtypes = [c_void_p, c_char_p, c_char_p, c_size_t]
        secp256k1.secp256k1_ec_pubkey_parse.restype = c_int

        secp256k1.secp256k1_ec_pubkey_serialize.argtypes = [c_void_p, c_char_p, c_void_p, c_char_p, c_uint]
        secp256k1.secp256k1_ec_pubkey_serialize.restype = c_int

        secp256k1.secp256k1_ecdsa_signature_parse_compact.argtypes = [c_void_p, c_char_p, c_char_p]
        secp256k1.secp256k1_ecdsa_signature_parse_compact.restype = c_int

        secp256k1.secp256k1_ecdsa_signature_normalize.argtypes = [c_void_p, c_char_p, c_char_p]
        secp256k1.secp256k1_ecdsa_signature_normalize.restype = c_int

        secp256k1.secp256k1_ecdsa_signature_serialize_compact.argtypes = [c_void_p, c_char_p, c_char_p]
        secp256k1.secp256k1_ecdsa_signature_serialize_compact.restype = c_int

        secp256k1.secp256k1_ec_pubkey_tweak_mul.argtypes = [c_void_p, c_char_p, c_char_p]
        secp256k1.secp256k1_ec_pubkey_tweak_mul.restype = c_int

        # create one shared context and randomize it (side-channel hardening)
        secp256k1.ctx = secp256k1.secp256k1_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY)
        r = secp256k1.secp256k1_context_randomize(secp256k1.ctx, os.urandom(32))
        if r:
            return secp256k1
        else:
            print_stderr('[ecc] warning: secp256k1_context_randomize failed')
            return None
    except (OSError, AttributeError):
        #traceback.print_exc(file=sys.stderr)
        print_stderr('[ecc] warning: libsecp256k1 library was found and loaded but there was an error when using it')
        return None
|
||||
|
||||
|
||||
class _patched_functions:
    """Namespace holding the original and libsecp256k1-backed versions of the
    python-ecdsa internals, plus the current patching state."""
    # True once _prepare_monkey_patching... has stored both function sets
    prepared_to_patch = False
    # True while the fast (libsecp256k1) versions are installed
    monkey_patching_active = False
|
||||
|
||||
|
||||
def _prepare_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1():
    """Build libsecp256k1-backed replacements for python-ecdsa's sign/verify
    and point multiplication, and stash both the originals and the fast
    versions on _patched_functions.  Does nothing if the library is absent.
    """
    if not _libsecp256k1:
        return

    # save original functions so that we can undo patching (needed for tests)
    _patched_functions.orig_sign = staticmethod(ecdsa.ecdsa.Private_key.sign)
    _patched_functions.orig_verify = staticmethod(ecdsa.ecdsa.Public_key.verifies)
    _patched_functions.orig_mul = staticmethod(ecdsa.ellipticcurve.Point.__mul__)

    curve_secp256k1 = ecdsa.ecdsa.curve_secp256k1
    curve_order = ecdsa.curves.SECP256k1.order
    point_at_infinity = ecdsa.ellipticcurve.INFINITY

    def mul(self: ecdsa.ellipticcurve.Point, other: int):
        """Point * scalar via secp256k1_ec_pubkey_tweak_mul."""
        if self.curve() != curve_secp256k1:
            # this operation is not on the secp256k1 curve; use original implementation
            return _patched_functions.orig_mul(self, other)
        other %= curve_order
        if self == point_at_infinity or other == 0:
            return point_at_infinity
        pubkey = create_string_buffer(64)
        # 0x04-prefixed uncompressed encoding of self
        public_pair_bytes = b'\4' + self.x().to_bytes(32, byteorder="big") + self.y().to_bytes(32, byteorder="big")
        r = _libsecp256k1.secp256k1_ec_pubkey_parse(
            _libsecp256k1.ctx, pubkey, public_pair_bytes, len(public_pair_bytes))
        if not r:
            return False
        r = _libsecp256k1.secp256k1_ec_pubkey_tweak_mul(_libsecp256k1.ctx, pubkey, other.to_bytes(32, byteorder="big"))
        if not r:
            return point_at_infinity

        # convert the opaque pubkey back to affine coordinates
        pubkey_serialized = create_string_buffer(65)
        pubkey_size = c_size_t(65)
        _libsecp256k1.secp256k1_ec_pubkey_serialize(
            _libsecp256k1.ctx, pubkey_serialized, byref(pubkey_size), pubkey, SECP256K1_EC_UNCOMPRESSED)
        x = int.from_bytes(pubkey_serialized[1:33], byteorder="big")
        y = int.from_bytes(pubkey_serialized[33:], byteorder="big")
        return ecdsa.ellipticcurve.Point(curve_secp256k1, x, y, curve_order)

    def sign(self: ecdsa.ecdsa.Private_key, hash: int, random_k: int):
        """Deterministic ECDSA sign via secp256k1_ecdsa_sign.
        note: random_k is ignored (libsecp256k1 uses RFC 6979 internally)."""
        if self.public_key.curve != curve_secp256k1:
            # this operation is not on the secp256k1 curve; use original implementation
            return _patched_functions.orig_sign(self, hash, random_k)
        secret_exponent = self.secret_multiplier
        nonce_function = None
        sig = create_string_buffer(64)
        sig_hash_bytes = hash.to_bytes(32, byteorder="big")
        _libsecp256k1.secp256k1_ecdsa_sign(
            _libsecp256k1.ctx, sig, sig_hash_bytes, secret_exponent.to_bytes(32, byteorder="big"), nonce_function, None)
        compact_signature = create_string_buffer(64)
        _libsecp256k1.secp256k1_ecdsa_signature_serialize_compact(_libsecp256k1.ctx, compact_signature, sig)
        r = int.from_bytes(compact_signature[:32], byteorder="big")
        s = int.from_bytes(compact_signature[32:], byteorder="big")
        return ecdsa.ecdsa.Signature(r, s)

    def verify(self: ecdsa.ecdsa.Public_key, hash: int, signature: ecdsa.ecdsa.Signature):
        """ECDSA verify via secp256k1_ecdsa_verify; returns a bool."""
        if self.curve != curve_secp256k1:
            # this operation is not on the secp256k1 curve; use original implementation
            return _patched_functions.orig_verify(self, hash, signature)
        sig = create_string_buffer(64)
        input64 = signature.r.to_bytes(32, byteorder="big") + signature.s.to_bytes(32, byteorder="big")
        r = _libsecp256k1.secp256k1_ecdsa_signature_parse_compact(_libsecp256k1.ctx, sig, input64)
        if not r:
            return False
        # normalize to low-S; secp256k1_ecdsa_verify rejects high-S signatures
        r = _libsecp256k1.secp256k1_ecdsa_signature_normalize(_libsecp256k1.ctx, sig, sig)

        public_pair_bytes = b'\4' + self.point.x().to_bytes(32, byteorder="big") + self.point.y().to_bytes(32, byteorder="big")
        pubkey = create_string_buffer(64)
        r = _libsecp256k1.secp256k1_ec_pubkey_parse(
            _libsecp256k1.ctx, pubkey, public_pair_bytes, len(public_pair_bytes))
        if not r:
            return False

        return 1 == _libsecp256k1.secp256k1_ecdsa_verify(_libsecp256k1.ctx, sig, hash.to_bytes(32, byteorder="big"), pubkey)

    # save new functions so that we can (re-)do patching
    _patched_functions.fast_sign = sign
    _patched_functions.fast_verify = verify
    _patched_functions.fast_mul = mul

    _patched_functions.prepared_to_patch = True
|
||||
|
||||
|
||||
def do_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1():
    """Install the libsecp256k1-backed sign/verify/multiply into python-ecdsa.
    No-op (with a log message) when the library is unavailable."""
    if not _libsecp256k1:
        # FIXME print_error will always print as 'verbosity' is not yet initialised
        print_error('[ecc] info: libsecp256k1 library not available, falling back to python-ecdsa. '
                    'This means signing operations will be slower.')
        return
    if not _patched_functions.prepared_to_patch:
        raise Exception("can't patch python-ecdsa without preparations")
    ecdsa.ecdsa.Private_key.sign = _patched_functions.fast_sign
    ecdsa.ecdsa.Public_key.verifies = _patched_functions.fast_verify
    ecdsa.ellipticcurve.Point.__mul__ = _patched_functions.fast_mul
    # ecdsa.ellipticcurve.Point.__add__ = ... # TODO??

    _patched_functions.monkey_patching_active = True
|
||||
|
||||
|
||||
def undo_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1():
    """Restore python-ecdsa's original sign/verify/multiply (used by tests)."""
    if not _libsecp256k1:
        return
    if not _patched_functions.prepared_to_patch:
        raise Exception("can't patch python-ecdsa without preparations")
    ecdsa.ecdsa.Private_key.sign = _patched_functions.orig_sign
    ecdsa.ecdsa.Public_key.verifies = _patched_functions.orig_verify
    ecdsa.ellipticcurve.Point.__mul__ = _patched_functions.orig_mul

    _patched_functions.monkey_patching_active = False
|
||||
|
||||
|
||||
def is_using_fast_ecc():
    """Report whether python-ecdsa is currently monkey-patched to use
    libsecp256k1 for sign/verify/multiply."""
    return _patched_functions.monkey_patching_active
|
||||
|
||||
|
||||
# Load libsecp256k1 at import time; on any failure fall back to None so the
# pure-python ecdsa implementation is used instead.
try:
    _libsecp256k1 = load_library()
except:
    _libsecp256k1 = None
    #traceback.print_exc(file=sys.stderr)

_prepare_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1()
|
||||
@ -1 +0,0 @@
|
||||
../run_electrum
|
||||
@ -1,616 +0,0 @@
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
import inspect
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import time
|
||||
import csv
|
||||
import decimal
|
||||
from decimal import Decimal
|
||||
import concurrent.futures
|
||||
import traceback
|
||||
from typing import Sequence
|
||||
|
||||
from .bitcoin import COIN
|
||||
from .i18n import _
|
||||
from .util import (PrintError, ThreadJob, make_dir, log_exceptions,
|
||||
make_aiohttp_session, resource_path)
|
||||
from .network import Network
|
||||
from .simple_config import SimpleConfig
|
||||
|
||||
|
||||
# See https://en.wikipedia.org/wiki/ISO_4217
# Currencies whose minor-unit precision differs from the usual 2 decimal
# places; anything not listed here is assumed to use 2.
CCY_PRECISIONS = {'BHD': 3, 'BIF': 0, 'BYR': 0, 'CLF': 4, 'CLP': 0,
                  'CVE': 0, 'DJF': 0, 'GNF': 0, 'IQD': 3, 'ISK': 0,
                  'JOD': 3, 'JPY': 0, 'KMF': 0, 'KRW': 0, 'KWD': 3,
                  'LYD': 3, 'MGA': 1, 'MRO': 1, 'OMR': 3, 'PYG': 0,
                  'RWF': 0, 'TND': 3, 'UGX': 0, 'UYI': 0, 'VND': 0,
                  'VUV': 0, 'XAF': 0, 'XAU': 4, 'XOF': 0, 'XPF': 0}
|
||||
|
||||
|
||||
class ExchangeBase(PrintError):
    """Base class for fiat exchange-rate backends.

    Subclasses implement async `get_rates(ccy)` (spot quotes) and optionally
    `history_ccys()` / `request_history(ccy)` for historical data.
    `on_quotes` / `on_history` are callbacks fired after updates.
    """

    def __init__(self, on_quotes, on_history):
        # ccy -> {date-string -> rate}; populated lazily from cache/network
        self.history = {}
        # ccy -> latest rate
        self.quotes = {}
        self.on_quotes = on_quotes
        self.on_history = on_history

    async def get_raw(self, site, get_string):
        """GET https://<site><get_string> and return the body as text."""
        # APIs must have https
        url = ''.join(['https://', site, get_string])
        async with make_aiohttp_session(Network.get_instance().proxy) as session:
            async with session.get(url) as response:
                return await response.text()

    async def get_json(self, site, get_string):
        """GET https://<site><get_string> and return the parsed JSON body."""
        # APIs must have https
        url = ''.join(['https://', site, get_string])
        async with make_aiohttp_session(Network.get_instance().proxy) as session:
            async with session.get(url) as response:
                # set content_type to None to disable checking MIME type
                return await response.json(content_type=None)

    async def get_csv(self, site, get_string):
        """GET a CSV resource and return it as a list of row dicts."""
        raw = await self.get_raw(site, get_string)
        reader = csv.DictReader(raw.split('\n'))
        return list(reader)

    def name(self):
        """Exchange name (the subclass name)."""
        return self.__class__.__name__

    @log_exceptions
    async def update_safe(self, ccy):
        """Fetch spot quotes for `ccy`; on failure clear quotes.
        Always fires on_quotes afterwards."""
        try:
            self.print_error("getting fx quotes for", ccy)
            self.quotes = await self.get_rates(ccy)
            self.print_error("received fx quotes")
        except BaseException as e:
            self.print_error("failed fx quotes:", repr(e))
            self.quotes = {}
        self.on_quotes()

    def update(self, ccy):
        """Schedule an async quote refresh on the running event loop."""
        asyncio.get_event_loop().create_task(self.update_safe(ccy))

    def read_historical_rates(self, ccy, cache_dir):
        """Load cached history for `ccy` from disk; returns the dict (with a
        'timestamp' of the cache file's mtime) or None if absent/corrupt."""
        filename = os.path.join(cache_dir, self.name() + '_'+ ccy)
        if os.path.exists(filename):
            timestamp = os.stat(filename).st_mtime
            try:
                with open(filename, 'r', encoding='utf-8') as f:
                    h = json.loads(f.read())
                h['timestamp'] = timestamp
            except:
                h = None
        else:
            h = None
        if h:
            self.history[ccy] = h
            self.on_history()
        return h

    @log_exceptions
    async def get_historical_rates_safe(self, ccy, cache_dir):
        """Fetch history for `ccy` from the network, cache it to disk, store
        it in self.history, then fire on_history.  Logs and returns on error."""
        try:
            self.print_error("requesting fx history for", ccy)
            h = await self.request_history(ccy)
            self.print_error("received fx history for", ccy)
        except BaseException as e:
            self.print_error("failed fx history:", e)
            #traceback.print_exc()
            return
        filename = os.path.join(cache_dir, self.name() + '_' + ccy)
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(json.dumps(h))
        h['timestamp'] = time.time()
        self.history[ccy] = h
        self.on_history()

    def get_historical_rates(self, ccy, cache_dir):
        """Ensure history for `ccy` is loaded; refresh from the network when
        the cache is missing or older than 24 hours."""
        if ccy not in self.history_ccys():
            return
        h = self.history.get(ccy)
        if h is None:
            h = self.read_historical_rates(ccy, cache_dir)
        if h is None or h['timestamp'] < time.time() - 24*3600:
            asyncio.get_event_loop().create_task(self.get_historical_rates_safe(ccy, cache_dir))

    def history_ccys(self):
        """Currencies with history support; subclasses override."""
        return []

    def historical_rate(self, ccy, d_t):
        """Rate for `ccy` on date `d_t` (a datetime), or 'NaN' if unknown."""
        return self.history.get(ccy, {}).get(d_t.strftime('%Y-%m-%d'), 'NaN')

    def get_currencies(self):
        # NOTE(review): get_rates is an async coroutine in subclasses; calling
        # it here without await returns a coroutine, so .items() would fail —
        # verify how/whether this method is still invoked.
        rates = self.get_rates('')
        return sorted([str(a) for (a, b) in rates.items() if b is not None and len(a)==3])
|
||||
|
||||
class BitcoinAverage(ExchangeBase):
    """BitcoinAverage global index: spot quotes for all pairs + CSV history."""

    async def get_rates(self, ccy):
        json = await self.get_json('apiv2.bitcoinaverage.com', '/indices/global/ticker/short')
        # keys look like 'BTCUSD'; strip the 'BTC' prefix to get the ccy code
        return dict([(r.replace("BTC", ""), Decimal(json[r]['last']))
                     for r in json if r != 'timestamp'])

    def history_ccys(self):
        return ['AUD', 'BRL', 'CAD', 'CHF', 'CNY', 'EUR', 'GBP', 'IDR', 'ILS',
                'MXN', 'NOK', 'NZD', 'PLN', 'RON', 'RUB', 'SEK', 'SGD', 'USD',
                'ZAR']

    async def request_history(self, ccy):
        history = await self.get_csv('apiv2.bitcoinaverage.com',
                                     "/indices/global/history/BTC%s?period=alltime&format=csv" % ccy)
        # keep only the date part of 'DateTime' as the key
        return dict([(h['DateTime'][:10], h['Average'])
                     for h in history])
|
||||
|
||||
|
||||
class Bitcointoyou(ExchangeBase):
    """Bitcointoyou (Brazil): BRL spot rate."""

    async def get_rates(self, ccy):
        json = await self.get_json('bitcointoyou.com', "/API/ticker.aspx")
        return {'BRL': Decimal(json['ticker']['last'])}

    def history_ccys(self):
        return ['BRL']
|
||||
|
||||
|
||||
class BitcoinVenezuela(ExchangeBase):
    """BitcoinVenezuela: spot rates and daily history."""

    async def get_rates(self, ccy):
        json = await self.get_json('api.bitcoinvenezuela.com', '/')
        rates = [(r, json['BTC'][r]) for r in json['BTC']
                 if json['BTC'][r] is not None]  # Giving NULL for LTC
        return dict(rates)

    def history_ccys(self):
        return ['ARS', 'EUR', 'USD', 'VEF']

    async def request_history(self, ccy):
        json = await self.get_json('api.bitcoinvenezuela.com',
                                   "/historical/index.php?coin=BTC")
        return json[ccy +'_BTC']
|
||||
|
||||
|
||||
class Bitbank(ExchangeBase):
    """Bitbank (Japan): JPY spot rate."""

    async def get_rates(self, ccy):
        json = await self.get_json('public.bitbank.cc', '/btc_jpy/ticker')
        return {'JPY': Decimal(json['data']['last'])}
|
||||
|
||||
|
||||
class BitFlyer(ExchangeBase):
    """bitFlyer (Japan): JPY mid price."""

    async def get_rates(self, ccy):
        json = await self.get_json('bitflyer.jp', '/api/echo/price')
        return {'JPY': Decimal(json['mid'])}
|
||||
|
||||
|
||||
class Bitmarket(ExchangeBase):
    """Bitmarket (Poland): PLN spot rate."""

    async def get_rates(self, ccy):
        json = await self.get_json('www.bitmarket.pl', '/json/BTCPLN/ticker.json')
        return {'PLN': Decimal(json['last'])}
|
||||
|
||||
|
||||
class BitPay(ExchangeBase):
    """BitPay: spot rates for all supported currencies."""

    async def get_rates(self, ccy):
        json = await self.get_json('bitpay.com', '/api/rates')
        return dict([(r['code'], Decimal(r['rate'])) for r in json])
|
||||
|
||||
|
||||
class Bitso(ExchangeBase):
    """Bitso (Mexico): MXN spot rate."""

    async def get_rates(self, ccy):
        json = await self.get_json('api.bitso.com', '/v2/ticker')
        return {'MXN': Decimal(json['last'])}
|
||||
|
||||
|
||||
class BitStamp(ExchangeBase):
    """Bitstamp: USD spot rate."""

    async def get_rates(self, ccy):
        json = await self.get_json('www.bitstamp.net', '/api/ticker/')
        return {'USD': Decimal(json['last'])}
|
||||
|
||||
|
||||
class Bitvalor(ExchangeBase):
    """Bitvalor aggregate (Brazil): BRL 1-hour total last price."""

    async def get_rates(self,ccy):
        json = await self.get_json('api.bitvalor.com', '/v1/ticker.json')
        return {'BRL': Decimal(json['ticker_1h']['total']['last'])}
|
||||
|
||||
|
||||
class BlockchainInfo(ExchangeBase):
    """blockchain.info ticker: 15-minute delayed rate for every currency."""

    async def get_rates(self, ccy):
        json = await self.get_json('blockchain.info', '/ticker')
        return dict([(r, Decimal(json[r]['15m'])) for r in json])
|
||||
|
||||
|
||||
class BTCChina(ExchangeBase):
    """BTCChina: CNY spot rate."""

    async def get_rates(self, ccy):
        json = await self.get_json('data.btcchina.com', '/data/ticker')
        return {'CNY': Decimal(json['ticker']['last'])}
|
||||
|
||||
|
||||
class BTCParalelo(ExchangeBase):
    """BTCParalelo (Venezuela): VEF spot rate."""

    async def get_rates(self, ccy):
        json = await self.get_json('btcparalelo.com', '/api/price')
        return {'VEF': Decimal(json['price'])}
|
||||
|
||||
|
||||
class Coinbase(ExchangeBase):
    """Coinbase: exchange rates for all currencies vs BTC."""

    async def get_rates(self, ccy):
        json = await self.get_json('api.coinbase.com',
                                   '/v2/exchange-rates?currency=BTC')
        return {ccy: Decimal(rate) for (ccy, rate) in json["data"]["rates"].items()}
|
||||
|
||||
|
||||
class CoinDesk(ExchangeBase):
    """CoinDesk BPI: spot quotes plus USD/EUR close-price history."""

    # NOTE(review): this override is async while ExchangeBase.get_currencies
    # is synchronous — callers must await it here; confirm call sites.
    async def get_currencies(self):
        dicts = await self.get_json('api.coindesk.com',
                                    '/v1/bpi/supported-currencies.json')
        return [d['currency'] for d in dicts]

    async def get_rates(self, ccy):
        json = await self.get_json('api.coindesk.com',
                                   '/v1/bpi/currentprice/%s.json' % ccy)
        result = {ccy: Decimal(json['bpi'][ccy]['rate_float'])}
        return result

    def history_starts(self):
        # earliest dates with BPI close-price data per currency
        return { 'USD': '2012-11-30', 'EUR': '2013-09-01' }

    def history_ccys(self):
        return self.history_starts().keys()

    async def request_history(self, ccy):
        start = self.history_starts()[ccy]
        end = datetime.today().strftime('%Y-%m-%d')
        # Note ?currency and ?index don't work as documented. Sigh.
        query = ('/v1/bpi/historical/close.json?start=%s&end=%s'
                 % (start, end))
        json = await self.get_json('api.coindesk.com', query)
        return json['bpi']
|
||||
|
||||
|
||||
class CoinMarketcap(ExchangeBase):
    """FLO quotes from the CoinMarketCap pro API.

    NOTE(review): the request URL embeds an API key directly in source;
    it should be moved to configuration rather than committed — confirm
    with the repository owner before rotating/removing it.
    """

    async def get_rates(self, ccy):
        data = await self.get_json('pro-api.coinmarketcap.com','/v1/cryptocurrency/quotes/latest?symbol=FLO&CMC_PRO_API_KEY=194ee8a1-5a58-4f3e-ba07-a1d6bc633210&convert=%s' % ccy)
        return {ccy: Decimal(data['data']['FLO']['quote'][ccy]['price'])}
|
||||
|
||||
|
||||
class Coinsecure(ExchangeBase):
    """INR spot rate from the Coinsecure ticker (price quoted in paise)."""

    async def get_rates(self, ccy):
        data = await self.get_json('api.coinsecure.in', '/v0/noauth/newticker')
        # Scale paise -> rupees with Decimal division; the previous
        # `Decimal(lastprice / 100.0)` divided in float first, losing
        # precision before Decimal ever saw the value.
        return {'INR': Decimal(data['lastprice']) / 100}
|
||||
|
||||
|
||||
class Foxbit(ExchangeBase):
    """BRL rate for the FOX exchange, taken from the Bitvalor aggregator."""

    async def get_rates(self, ccy):
        data = await self.get_json('api.bitvalor.com', '/v1/ticker.json')
        return {'BRL': Decimal(data['ticker_1h']['exchanges']['FOX']['last'])}
|
||||
|
||||
|
||||
class itBit(ExchangeBase):
    """USD/EUR/SGD rates from itBit; one market is queried per call.

    Unsupported currencies yield a result dict whose values are all None.
    """

    async def get_rates(self, ccy):
        supported = ['USD', 'EUR', 'SGD']
        data = await self.get_json('api.itbit.com', '/v1/markets/XBT%s/ticker' % ccy)
        result = dict.fromkeys(supported)
        if ccy in supported:
            result[ccy] = Decimal(data['lastPrice'])
        return result
|
||||
|
||||
|
||||
class Kraken(ExchangeBase):
    """Spot rates for several fiat pairs from the Kraken public Ticker."""

    async def get_rates(self, ccy):
        ccys = ['EUR', 'USD', 'CAD', 'GBP', 'JPY']
        pairs = ['XBT%s' % c for c in ccys]
        json = await self.get_json('api.kraken.com',
                                   '/0/public/Ticker?pair=%s' % ','.join(pairs))
        # Feed the quoted price straight to Decimal; the previous
        # Decimal(float(...)) round-tripped through binary float and
        # lost precision for no benefit.
        return dict((k[-3:], Decimal(v['c'][0]))
                    for k, v in json['result'].items())
|
||||
|
||||
|
||||
class LocalBitcoins(ExchangeBase):
    """Per-currency last-trade rates from the LocalBitcoins aggregate feed."""

    async def get_rates(self, ccy):
        data = await self.get_json('localbitcoins.com',
                                   '/bitcoinaverage/ticker-all-currencies/')
        return {code: Decimal(entry['rates']['last'])
                for code, entry in data.items()}
|
||||
|
||||
|
||||
class MercadoBitcoin(ExchangeBase):
    """BRL rate for the MBT exchange, taken from the Bitvalor aggregator."""

    async def get_rates(self, ccy):
        data = await self.get_json('api.bitvalor.com', '/v1/ticker.json')
        return {'BRL': Decimal(data['ticker_1h']['exchanges']['MBT']['last'])}
|
||||
|
||||
|
||||
class NegocieCoins(ExchangeBase):
    """BRL rate for the NEG exchange, taken from the Bitvalor aggregator."""

    async def get_rates(self, ccy):
        data = await self.get_json('api.bitvalor.com', '/v1/ticker.json')
        return {'BRL': Decimal(data['ticker_1h']['exchanges']['NEG']['last'])}
|
||||
|
||||
class TheRockTrading(ExchangeBase):
    """EUR spot rate from The Rock Trading BTCEUR fund ticker."""

    async def get_rates(self, ccy):
        data = await self.get_json('api.therocktrading.com',
                                   '/v1/funds/BTCEUR/ticker')
        return {'EUR': Decimal(data['last'])}
|
||||
|
||||
class Unocoin(ExchangeBase):
    """INR buy price from Unocoin; the endpoint returns a bare number."""

    async def get_rates(self, ccy):
        price = await self.get_json('www.unocoin.com', 'trade?buy')
        return {'INR': Decimal(price)}
|
||||
|
||||
|
||||
class WEX(ExchangeBase):
    """EUR/RUB/USD spot rates from wex.nz, fetched concurrently."""

    async def get_rates(self, ccy):
        # The three ticker requests are independent, so issue them
        # concurrently instead of awaiting each one in sequence.
        json_eur, json_rub, json_usd = await asyncio.gather(
            self.get_json('wex.nz', '/api/3/ticker/btc_eur'),
            self.get_json('wex.nz', '/api/3/ticker/btc_rur'),
            self.get_json('wex.nz', '/api/3/ticker/btc_usd'))
        return {'EUR': Decimal(json_eur['btc_eur']['last']),
                'RUB': Decimal(json_rub['btc_rur']['last']),
                'USD': Decimal(json_usd['btc_usd']['last'])}
|
||||
|
||||
|
||||
class Winkdex(ExchangeBase):
    """USD rate and price history from the WinkDex index API.

    Prices are delivered in cents and scaled to dollars here.
    """

    async def get_rates(self, ccy):
        data = await self.get_json('winkdex.com', '/api/v0/price')
        # Divide as Decimal; the previous Decimal(price / 100.0) did the
        # cents -> dollars scaling in float first, losing precision.
        return {'USD': Decimal(data['price']) / 100}

    def history_ccys(self):
        return ['USD']

    async def request_history(self, ccy):
        data = await self.get_json('winkdex.com',
                                   "/api/v0/series?start_time=1342915200")
        history = data['series'][0]['results']
        # History values stay as floats, matching the original contract.
        return dict([(h['timestamp'][:10], h['price'] / 100.0)
                     for h in history])
|
||||
|
||||
|
||||
class Zaif(ExchangeBase):
    """JPY spot rate from the Zaif last-price endpoint."""

    async def get_rates(self, ccy):
        data = await self.get_json('api.zaif.jp', '/api/1/last_price/btc_jpy')
        return {'JPY': Decimal(data['last_price'])}
|
||||
|
||||
|
||||
def dictinvert(d):
    """Invert a mapping of key -> iterable-of-values.

    Returns a dict mapping each value to the list of keys that contained
    it, in the iteration order of ``d``.
    """
    inverted = {}
    for key, values in d.items():
        for value in values:
            inverted.setdefault(value, []).append(key)
    return inverted
|
||||
|
||||
def get_exchanges_and_currencies():
    """Return a mapping of exchange class name -> supported currencies.

    Loads the bundled ``currencies.json`` when present; otherwise probes
    every ``ExchangeBase`` subclass defined in this module and rewrites
    the file with the collected results.
    """
    path = resource_path('currencies.json')
    try:
        with open(path, 'r', encoding='utf-8') as f:
            return json.loads(f.read())
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # are no longer swallowed; any read/parse failure falls through to
    # the regeneration path below.
    except Exception:
        pass
    d = {}
    is_exchange = lambda obj: (inspect.isclass(obj)
                               and issubclass(obj, ExchangeBase)
                               and obj != ExchangeBase)
    exchanges = dict(inspect.getmembers(sys.modules[__name__], is_exchange))
    for name, klass in exchanges.items():
        exchange = klass(None, None)
        try:
            # NOTE(review): get_currencies is declared async on the
            # exchanges in this module, so this stores a coroutine
            # object rather than a list — confirm whether this
            # regeneration path is still exercised.
            d[name] = exchange.get_currencies()
            print(name, "ok")
        except Exception:
            print(name, "error")
            continue
    with open(path, 'w', encoding='utf-8') as f:
        f.write(json.dumps(d, indent=4, sort_keys=True))
    return d
|
||||
|
||||
|
||||
# Computed once at import time: exchange class name -> supported currency
# codes, as produced by get_exchanges_and_currencies() above.
CURRENCIES = get_exchanges_and_currencies()
|
||||
|
||||
|
||||
def get_exchanges_by_ccy(history=True):
    """Map each currency code to the exchange names that support it.

    When ``history`` is true, only currencies with historical-rate
    support at each exchange are considered.
    """
    if not history:
        return dictinvert(CURRENCIES)
    history_support = {}
    for name in CURRENCIES.keys():
        exchange = globals()[name](None, None)
        history_support[name] = exchange.history_ccys()
    return dictinvert(history_support)
|
||||
|
||||
|
||||
class FxThread(ThreadJob):
    """Background job that keeps fiat exchange-rate data up to date.

    Polls the configured exchange for spot quotes (and, when enabled,
    historical rates), caches history under the wallet's cache dir, and
    fires the network callbacks 'on_quotes' / 'on_history' when data
    arrives.
    """

    def __init__(self, config: SimpleConfig, network: Network):
        self.config = config
        self.network = network
        if self.network:
            # Refresh rates whenever the proxy configuration changes.
            self.network.register_callback(self.set_proxy, ['proxy_set'])
        self.ccy = self.get_currency()
        self.history_used_spot = False
        # GUI widget handles, populated by the settings dialog (not here).
        self.ccy_combo = None
        self.hist_checkbox = None
        self.cache_dir = os.path.join(config.path, 'cache')
        # Event used to wake run(); set initially so the first iteration
        # fetches immediately.
        self._trigger = asyncio.Event()
        self._trigger.set()
        self.set_exchange(self.config_exchange())
        make_dir(self.cache_dir)

    def set_proxy(self, trigger_name, *args):
        # Wake the run() loop so rates are re-fetched over the new proxy.
        self._trigger.set()

    @staticmethod
    def get_currencies(history: bool) -> Sequence[str]:
        """Sorted list of currency codes with at least one exchange."""
        d = get_exchanges_by_ccy(history)
        return sorted(d.keys())

    @staticmethod
    def get_exchanges_by_ccy(ccy: str, history: bool) -> Sequence[str]:
        """Exchanges supporting ``ccy`` (with history when requested)."""
        d = get_exchanges_by_ccy(history)
        return d.get(ccy, [])

    @staticmethod
    def remove_thousands_separator(text):
        return text.replace(',', '')  # FIXME use THOUSAND_SEPARATOR in util

    def ccy_amount_str(self, amount, commas):
        """Format ``amount`` with the currency's precision; ``commas``
        selects thousands grouping."""
        prec = CCY_PRECISIONS.get(self.ccy, 2)
        fmt_str = "{:%s.%df}" % ("," if commas else "", max(0, prec))  # FIXME use util.THOUSAND_SEPARATOR and util.DECIMAL_POINT
        try:
            rounded_amount = round(amount, prec)
        except decimal.InvalidOperation:
            # e.g. amount is NaN — format it unrounded.
            rounded_amount = amount
        return fmt_str.format(rounded_amount)

    async def run(self):
        # Main loop: wait up to 150s for a trigger, then refresh quotes
        # (and history when shown). A timeout refreshes on schedule.
        while True:
            try:
                await asyncio.wait_for(self._trigger.wait(), 150)
            except concurrent.futures.TimeoutError:
                pass
            else:
                self._trigger.clear()
                if self.is_enabled():
                    if self.show_history():
                        self.exchange.get_historical_rates(self.ccy, self.cache_dir)
            if self.is_enabled():
                self.exchange.update(self.ccy)

    def is_enabled(self):
        return bool(self.config.get('use_exchange_rate'))

    def set_enabled(self, b):
        self.config.set_key('use_exchange_rate', bool(b))
        self.trigger_update()

    def get_history_config(self):
        return bool(self.config.get('history_rates'))

    def set_history_config(self, b):
        self.config.set_key('history_rates', bool(b))

    def get_history_capital_gains_config(self):
        return bool(self.config.get('history_rates_capital_gains', False))

    def set_history_capital_gains_config(self, b):
        self.config.set_key('history_rates_capital_gains', bool(b))

    def get_fiat_address_config(self):
        return bool(self.config.get('fiat_address'))

    def set_fiat_address_config(self, b):
        self.config.set_key('fiat_address', bool(b))

    def get_currency(self):
        '''Use when dynamic fetching is needed'''
        return self.config.get("currency", "EUR")

    def config_exchange(self):
        return self.config.get('use_exchange', 'CoinMarketcap')

    def show_history(self):
        return self.is_enabled() and self.get_history_config() and self.ccy in self.exchange.history_ccys()

    def set_currency(self, ccy):
        self.ccy = ccy
        self.config.set_key('currency', ccy, True)
        self.trigger_update()
        self.on_quotes()

    def trigger_update(self):
        # Thread-safe wake-up of the run() loop on the network's loop.
        if self.network:
            self.network.asyncio_loop.call_soon_threadsafe(self._trigger.set)

    def set_exchange(self, name):
        # NOTE(review): falls back to BitcoinAverage when `name` is not a
        # class in this module — confirm that class still exists upstream.
        class_ = globals().get(name, BitcoinAverage)
        self.print_error("using exchange", name)
        if self.config_exchange() != name:
            self.config.set_key('use_exchange', name, True)
        self.exchange = class_(self.on_quotes, self.on_history)
        # A new exchange means new fx quotes, initially empty. Force
        # a quote refresh
        self.trigger_update()
        self.exchange.read_historical_rates(self.ccy, self.cache_dir)

    def on_quotes(self):
        if self.network:
            self.network.trigger_callback('on_quotes')

    def on_history(self):
        if self.network:
            self.network.trigger_callback('on_history')

    def exchange_rate(self) -> Decimal:
        """Returns the exchange rate as a Decimal"""
        rate = self.exchange.quotes.get(self.ccy)
        if rate is None:
            return Decimal('NaN')
        return Decimal(rate)

    def format_amount(self, btc_balance):
        # Empty string when no rate is available.
        rate = self.exchange_rate()
        return '' if rate.is_nan() else "%s" % self.value_str(btc_balance, rate)

    def format_amount_and_units(self, btc_balance):
        rate = self.exchange_rate()
        return '' if rate.is_nan() else "%s %s" % (self.value_str(btc_balance, rate), self.ccy)

    def get_fiat_status_text(self, btc_balance, base_unit, decimal_point):
        rate = self.exchange_rate()
        return _(" (No FX rate available)") if rate.is_nan() else " 1 %s~%s %s" % (base_unit,
            self.value_str(COIN / (10**(8 - decimal_point)), rate), self.ccy)

    def fiat_value(self, satoshis, rate):
        # NaN for unknown amounts so formatting can show "No data".
        return Decimal('NaN') if satoshis is None else Decimal(satoshis) / COIN * Decimal(rate)

    def value_str(self, satoshis, rate):
        return self.format_fiat(self.fiat_value(satoshis, rate))

    def format_fiat(self, value):
        if value.is_nan():
            return _("No data")
        return "%s" % (self.ccy_amount_str(value, True))

    def history_rate(self, d_t):
        """Historical rate for datetime ``d_t``; NaN Decimal if unknown."""
        if d_t is None:
            return Decimal('NaN')
        rate = self.exchange.historical_rate(self.ccy, d_t)
        # Frequently there is no rate for today, until tomorrow :)
        # Use spot quotes in that case
        if rate == 'NaN' and (datetime.today().date() - d_t.date()).days <= 2:
            rate = self.exchange.quotes.get(self.ccy, 'NaN')
            self.history_used_spot = True
        return Decimal(rate)

    def historical_value_str(self, satoshis, d_t):
        return self.format_fiat(self.historical_value(satoshis, d_t))

    def historical_value(self, satoshis, d_t):
        return self.fiat_value(satoshis, self.history_rate(d_t))

    def timestamp_rate(self, timestamp):
        from .util import timestamp_to_datetime
        date = timestamp_to_datetime(timestamp)
        return self.history_rate(date)
|
||||
|
Before Width: | Height: | Size: 687 B |
|
Before Width: | Height: | Size: 1.3 KiB |
|
Before Width: | Height: | Size: 7.4 KiB |
|
Before Width: | Height: | Size: 8.3 KiB |
|
Before Width: | Height: | Size: 7.1 KiB |
|
Before Width: | Height: | Size: 7.3 KiB |
|
Before Width: | Height: | Size: 9.8 KiB |
|
Before Width: | Height: | Size: 528 B |
|
Before Width: | Height: | Size: 788 B |
|
Before Width: | Height: | Size: 53 KiB |
@ -1,44 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<!-- https://commons.wikimedia.org/wiki/File:CrystalClearActionApply.svg -->
|
||||
<svg width="512" height="512" viewBox="0 0 128 128" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<defs>
|
||||
<linearGradient id="linearGradient3930">
|
||||
<stop style="stop-color:#ffffff;stop-opacity:1" offset="0"/>
|
||||
<stop style="stop-color:#b3d187;stop-opacity:1" offset="0.53316939"/>
|
||||
<stop style="stop-color:#28f400;stop-opacity:1" offset="1"/>
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="linearGradient3904">
|
||||
<stop style="stop-color:#4df60b;stop-opacity:1" offset="0"/>
|
||||
<stop style="stop-color:#008000;stop-opacity:1" offset="1"/>
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="linearGradient3878">
|
||||
<stop style="stop-color:#79ef39;stop-opacity:1" offset="0"/>
|
||||
<stop style="stop-color:#e9ffe3;stop-opacity:1" offset="1"/>
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="linearGradient3044">
|
||||
<stop style="stop-color:#f9ffd1;stop-opacity:1" offset="0"/>
|
||||
<stop style="stop-color:#84e246;stop-opacity:1" offset="0.25998953"/>
|
||||
<stop style="stop-color:#008000;stop-opacity:1" offset="1"/>
|
||||
</linearGradient>
|
||||
<radialGradient cx="60.764378" cy="104.22466" r="63.17857" fx="60.764378" fy="104.22466" id="radialGradient3838" xlink:href="#linearGradient3044" gradientUnits="userSpaceOnUse" gradientTransform="matrix(0.85744972,-0.78795737,0.89944031,0.97876469,-84.596269,34.939755)"/>
|
||||
<radialGradient cx="145" cy="29" r="230" id="radialGradient3838-0" gradientUnits="userSpaceOnUse" gradientTransform="matrix(0.66070117,-0.56004574,0.69392651,0.81864398,-39.045089,37.984063)">
|
||||
<stop style="stop-color:#FFFFFF;stop-opacity:1" offset="0"/>
|
||||
<stop style="stop-color:#FFFFFF;stop-opacity:1" offset="0.20"/>
|
||||
<stop style="stop-color:#8BE456;stop-opacity:1" offset="0.51"/>
|
||||
<stop style="stop-color:#8BE456;stop-opacity:1" offset="0.74"/>
|
||||
<stop style="stop-color:#8BE456;stop-opacity:1" offset="1"/>
|
||||
</radialGradient>
|
||||
<radialGradient cx="48.356026" cy="122.04626" r="63.17857" fx="48.356026" fy="122.04626" id="radialGradient3838-4" xlink:href="#linearGradient3904" gradientUnits="userSpaceOnUse" gradientTransform="matrix(0.97494521,-0.22244513,0.1978519,0.86715661,-18.612993,12.209071)"/>
|
||||
<radialGradient cx="57.965954" cy="109.5996" r="63.17857" fx="57.965954" fy="109.5996" id="radialGradient3838-5" xlink:href="#linearGradient3930" gradientUnits="userSpaceOnUse" gradientTransform="matrix(0.77328328,-0.70601122,0.71173774,0.77955545,-64.080277,49.199946)"/>
|
||||
</defs>
|
||||
<g>
|
||||
<path d="M 24.642857,32.642859 1.4285714,54.07143 51.071429,97.285716 126.78571,31.57143 l -22.5,-20 -52.142853,47.142858 z" id="path2997-1" style="fill:url(#radialGradient3838);fill-opacity:1;stroke:none"/>
|
||||
<path d="M 1.4285714,54.428573 51.071429,97.64286 126.78571,31.928573 126.86922,40.559647 51.78571,108.71429 1.77525,63.026751 z" id="path2997-7" style="color:#000000;fill:url(#radialGradient3838-4);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1px;marker:none;visibility:visible;display:inline;overflow:visible;enable-background:accumulate"/>
|
||||
<path d="m 24.285714,39.785715 27.5,25.000001 52.142856,-46.428569 15.71429,14.64285 c 0,0 -14.28571,12.857149 -28.928573,25.714289 -14.642854,12.85714 -54.642859,15 -80.7142871,-5.71429 C 20.357143,43.714286 24.285714,39.785715 24.285714,39.785715 z" id="path3042" style="color:#000000;fill:url(#radialGradient3838-0);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1px;marker:none;visibility:visible;display:inline;overflow:visible;enable-background:accumulate"/>
|
||||
<path d="M 1.4285714,54.07143 51.071429,97.285716 126.78571,31.57143 51.071424,101.21429 z" id="path2997" style="color:#000000;fill:url(#radialGradient3838-5);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1px;marker:none;visibility:visible;display:inline;overflow:visible;enable-background:accumulate"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 4.1 KiB |
|
Before Width: | Height: | Size: 39 KiB |
|
Before Width: | Height: | Size: 50 KiB |
|
Before Width: | Height: | Size: 15 KiB |
|
Before Width: | Height: | Size: 15 KiB |
|
Before Width: | Height: | Size: 26 KiB |
|
Before Width: | Height: | Size: 144 KiB |
|
Before Width: | Height: | Size: 2.8 KiB |
|
Before Width: | Height: | Size: 4.7 KiB |
|
Before Width: | Height: | Size: 1.7 KiB |
|
Before Width: | Height: | Size: 39 KiB |
|
Before Width: | Height: | Size: 463 B |