Compare commits
1 Commits
master
...
check_wall
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dba9eb9b3a |
14
.gitignore
vendored
@ -4,24 +4,20 @@
|
||||
build/
|
||||
dist/
|
||||
*.egg/
|
||||
/electrum.py
|
||||
contrib/pyinstaller/
|
||||
Electrum.egg-info/
|
||||
electrum/locale/
|
||||
gui/qt/icons_rc.py
|
||||
locale/
|
||||
.devlocaltmp/
|
||||
*_trial_temp
|
||||
packages
|
||||
env/
|
||||
.tox/
|
||||
.buildozer/
|
||||
bin/
|
||||
/app.fil
|
||||
.idea
|
||||
|
||||
# icons
|
||||
electrum/gui/kivy/theming/light-0.png
|
||||
electrum/gui/kivy/theming/light.atlas
|
||||
|
||||
# tests/tox
|
||||
.tox/
|
||||
# tox files
|
||||
.cache/
|
||||
.coverage
|
||||
.pytest_cache
|
||||
|
||||
6
.gitmodules
vendored
@ -1,6 +1,6 @@
|
||||
[submodule "contrib/deterministic-build/electrum-icons"]
|
||||
path = contrib/deterministic-build/electrum-icons
|
||||
url = https://github.com/spesmilo/electrum-icons
|
||||
[submodule "contrib/deterministic-build/electrum-locale"]
|
||||
path = contrib/deterministic-build/electrum-locale
|
||||
url = https://github.com/spesmilo/electrum-locale
|
||||
[submodule "contrib/CalinsQRReader"]
|
||||
path = contrib/osx/CalinsQRReader
|
||||
url = https://github.com/spesmilo/CalinsQRReader
|
||||
|
||||
58
.travis.yml
@ -1,19 +1,14 @@
|
||||
sudo: true
|
||||
dist: xenial
|
||||
sudo: false
|
||||
language: python
|
||||
python:
|
||||
- 3.5
|
||||
- 3.6
|
||||
- 3.7
|
||||
git:
|
||||
depth: false
|
||||
addons:
|
||||
apt:
|
||||
sources:
|
||||
- sourceline: 'ppa:tah83/secp256k1'
|
||||
packages:
|
||||
- libsecp256k1-0
|
||||
before_install:
|
||||
- git tag
|
||||
install:
|
||||
- pip install -r contrib/requirements/requirements-travis.txt
|
||||
cache:
|
||||
@ -23,12 +18,11 @@ cache:
|
||||
script:
|
||||
- tox
|
||||
after_success:
|
||||
- if [ "$TRAVIS_BRANCH" = "master" ]; then pip install requests && contrib/make_locale; fi
|
||||
- if [ "$TRAVIS_BRANCH" = "master" ]; then pip install pycurl requests && contrib/make_locale; fi
|
||||
- coveralls
|
||||
jobs:
|
||||
include:
|
||||
- stage: binary builds
|
||||
name: "Windows build"
|
||||
sudo: true
|
||||
language: c
|
||||
python: false
|
||||
@ -37,58 +31,26 @@ jobs:
|
||||
services:
|
||||
- docker
|
||||
install:
|
||||
- sudo docker build --no-cache -t electrum-wine-builder-img ./contrib/build-wine/docker/
|
||||
- sudo docker build --no-cache -t electrum-wine-builder-img ./contrib/build-wine/docker/
|
||||
script:
|
||||
- sudo docker run --name electrum-wine-builder-cont -v $PWD:/opt/wine64/drive_c/electrum --rm --workdir /opt/wine64/drive_c/electrum/contrib/build-wine electrum-wine-builder-img ./build.sh
|
||||
- sudo docker run --name electrum-wine-builder-cont -v $PWD:/opt/electrum --rm --workdir /opt/electrum/contrib/build-wine electrum-wine-builder-img ./build.sh $TRAVIS_COMMIT
|
||||
after_success: true
|
||||
- name: "Android build"
|
||||
language: python
|
||||
python: 3.7
|
||||
env:
|
||||
# reset API key to not have make_locale upload stuff here
|
||||
- crowdin_api_key=
|
||||
services:
|
||||
- docker
|
||||
install:
|
||||
- pip install requests && ./contrib/make_locale
|
||||
- ./contrib/make_packages
|
||||
- sudo docker build --no-cache -t electrum-android-builder-img electrum/gui/kivy/tools
|
||||
script:
|
||||
- sudo chown -R 1000:1000 .
|
||||
# Output something every minute or Travis kills the job
|
||||
- while sleep 60; do echo "=====[ $SECONDS seconds still running ]====="; done &
|
||||
- sudo docker run -it -u 1000:1000 --rm --name electrum-android-builder-cont -v $PWD:/home/user/wspace/electrum --workdir /home/user/wspace/electrum electrum-android-builder-img ./contrib/make_apk
|
||||
# kill background sleep loop
|
||||
- kill %1
|
||||
- ls -la bin
|
||||
- if [ $(ls bin | grep -c Electrum-*) -eq 0 ]; then exit 1; fi
|
||||
after_success: true
|
||||
- name: "MacOS build"
|
||||
os: osx
|
||||
- os: osx
|
||||
language: c
|
||||
env:
|
||||
- TARGET_OS=macOS
|
||||
python: false
|
||||
install:
|
||||
- git fetch --all --tags
|
||||
script: ./contrib/osx/make_osx
|
||||
- git fetch origin --unshallow
|
||||
script: ./contrib/build-osx/make_osx
|
||||
after_script: ls -lah dist && md5 dist/*
|
||||
after_success: true
|
||||
- name: "AppImage build"
|
||||
sudo: true
|
||||
language: c
|
||||
python: false
|
||||
services:
|
||||
- docker
|
||||
install:
|
||||
- sudo docker build --no-cache -t electrum-appimage-builder-img ./contrib/build-linux/appimage/
|
||||
script:
|
||||
- sudo docker run --name electrum-appimage-builder-cont -v $PWD:/opt/electrum --rm --workdir /opt/electrum/contrib/build-linux/appimage electrum-appimage-builder-img ./build.sh
|
||||
after_success: true
|
||||
- stage: release check
|
||||
install:
|
||||
- git fetch --all --tags
|
||||
- git fetch origin --unshallow
|
||||
script:
|
||||
- ./contrib/deterministic-build/check_submodules.sh
|
||||
after_success: true
|
||||
if: tag IS present
|
||||
if: tag IS present
|
||||
13
AUTHORS
@ -1,5 +1,3 @@
|
||||
Electrum-BTC
|
||||
------------
|
||||
ThomasV - Creator and maintainer.
|
||||
Animazing / Tachikoma - Styled the new GUI. Mac version.
|
||||
Azelphur - GUI stuff.
|
||||
@ -11,13 +9,4 @@ Genjix - Porting pro-mode functionality to lite-gui and worked on server
|
||||
Slush - Work on the server. Designed the original Stratum spec.
|
||||
Julian Toash (Tuxavant) - Various fixes to the client.
|
||||
rdymac - Website and translations.
|
||||
kyuupichan - Miscellaneous.
|
||||
|
||||
|
||||
FLO-Electrum
|
||||
------------
|
||||
vivekteega - Maintainer and remaining stuff
|
||||
Bitspill - Bootstraped the project with core FLO changes
|
||||
Rohit Tripathy - Ideation and problem solving
|
||||
akhil2015 - Flodata and scrypt hashing
|
||||
|
||||
kyuupichan - Miscellaneous.
|
||||
22
Info.plist
Normal file
@ -0,0 +1,22 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>CFBundleURLTypes</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>CFBundleURLName</key>
|
||||
<string>bitcoin</string>
|
||||
<key>CFBundleURLSchemes</key>
|
||||
<array>
|
||||
<string>bitcoin</string>
|
||||
</array>
|
||||
</dict>
|
||||
</array>
|
||||
<key>LSArchitecturePriority</key>
|
||||
<array>
|
||||
<string>x86_64</string>
|
||||
<string>i386</string>
|
||||
</array>
|
||||
</dict>
|
||||
</plist>
|
||||
16
MANIFEST.in
@ -1,17 +1,17 @@
|
||||
include LICENCE RELEASE-NOTES AUTHORS
|
||||
include README.rst
|
||||
include electrum.conf.sample
|
||||
include electrum.desktop
|
||||
include *.py
|
||||
include run_electrum
|
||||
include electrum
|
||||
include contrib/requirements/requirements.txt
|
||||
include contrib/requirements/requirements-hw.txt
|
||||
recursive-include lib *.py
|
||||
recursive-include gui *.py
|
||||
recursive-include plugins *.py
|
||||
recursive-include packages *.py
|
||||
recursive-include packages cacert.pem
|
||||
include icons.qrc
|
||||
recursive-include icons *
|
||||
recursive-include scripts *
|
||||
|
||||
graft electrum
|
||||
prune electrum/tests
|
||||
|
||||
global-exclude __pycache__
|
||||
global-exclude *.py[co~]
|
||||
global-exclude *.py.orig
|
||||
global-exclude *.py.rej
|
||||
|
||||
42
README.rst
@ -5,7 +5,7 @@ Electrum - Lightweight Bitcoin client
|
||||
|
||||
Licence: MIT Licence
|
||||
Author: Thomas Voegtlin
|
||||
Language: Python (>= 3.6)
|
||||
Language: Python
|
||||
Homepage: https://electrum.org/
|
||||
|
||||
|
||||
@ -15,9 +15,9 @@ Electrum - Lightweight Bitcoin client
|
||||
.. image:: https://coveralls.io/repos/github/spesmilo/electrum/badge.svg?branch=master
|
||||
:target: https://coveralls.io/github/spesmilo/electrum?branch=master
|
||||
:alt: Test coverage statistics
|
||||
.. image:: https://d322cqt584bo4o.cloudfront.net/electrum/localized.svg
|
||||
.. image:: https://img.shields.io/badge/help-translating-blue.svg
|
||||
:target: https://crowdin.com/project/electrum
|
||||
:alt: Help translate Electrum online
|
||||
:alt: Help translating Electrum online
|
||||
|
||||
|
||||
|
||||
@ -32,19 +32,19 @@ Qt interface, install the Qt dependencies::
|
||||
sudo apt-get install python3-pyqt5
|
||||
|
||||
If you downloaded the official package (tar.gz), you can run
|
||||
Electrum from its root directory without installing it on your
|
||||
Electrum from its root directory, without installing it on your
|
||||
system; all the python dependencies are included in the 'packages'
|
||||
directory. To run Electrum from its root directory, just do::
|
||||
|
||||
./run_electrum
|
||||
./electrum
|
||||
|
||||
You can also install Electrum on your system, by running this command::
|
||||
|
||||
sudo apt-get install python3-setuptools
|
||||
python3 -m pip install .[fast]
|
||||
pip3 install .[fast]
|
||||
|
||||
This will download and install the Python dependencies used by
|
||||
Electrum instead of using the 'packages' directory.
|
||||
Electrum, instead of using the 'packages' directory.
|
||||
The 'fast' extra contains some optional dependencies that we think
|
||||
are often useful but they are not strictly needed.
|
||||
|
||||
@ -64,13 +64,21 @@ Check out the code from GitHub::
|
||||
|
||||
Run install (this should install dependencies)::
|
||||
|
||||
python3 -m pip install .[fast]
|
||||
pip3 install .[fast]
|
||||
|
||||
Render the SVG icons to PNGs (optional)::
|
||||
|
||||
for i in lock unlock confirmed status_lagging status_disconnected status_connected_proxy status_connected status_waiting preferences; do convert -background none icons/$i.svg icons/$i.png; done
|
||||
|
||||
Compile the icons file for Qt::
|
||||
|
||||
sudo apt-get install pyqt5-dev-tools
|
||||
pyrcc5 icons.qrc -o gui/qt/icons_rc.py
|
||||
|
||||
Compile the protobuf description file::
|
||||
|
||||
sudo apt-get install protobuf-compiler
|
||||
protoc --proto_path=electrum --python_out=electrum electrum/paymentrequest.proto
|
||||
protoc --proto_path=lib/ --python_out=lib/ lib/paymentrequest.proto
|
||||
|
||||
Create translations (optional)::
|
||||
|
||||
@ -83,25 +91,25 @@ Create translations (optional)::
|
||||
Creating Binaries
|
||||
=================
|
||||
|
||||
Linux
|
||||
-----
|
||||
|
||||
See :code:`contrib/build-linux/README.md`.
|
||||
To create binaries, create the 'packages' directory::
|
||||
|
||||
./contrib/make_packages
|
||||
|
||||
This directory contains the python dependencies used by Electrum.
|
||||
|
||||
Mac OS X / macOS
|
||||
----------------
|
||||
|
||||
See :code:`contrib/osx/README.md`.
|
||||
--------
|
||||
|
||||
See `contrib/build-osx/`.
|
||||
|
||||
Windows
|
||||
-------
|
||||
|
||||
See :code:`contrib/build-wine/docker/README.md`.
|
||||
See `contrib/build-wine/`.
|
||||
|
||||
|
||||
Android
|
||||
-------
|
||||
|
||||
See :code:`electrum/gui/kivy/Readme.md`.
|
||||
See `gui/kivy/Readme.txt` file.
|
||||
|
||||
102
RELEASE-NOTES
@ -1,95 +1,3 @@
|
||||
# Release 3.3.4 - (February 13, 2019)
|
||||
|
||||
* AppImage: we now also distribute self-contained binaries for x86_64
|
||||
Linux in the form of an AppImage (#5042). The Python interpreter,
|
||||
PyQt5, libsecp256k1, PyCryptodomex, zbar, hidapi/libusb (including
|
||||
hardware wallet libraries) are all bundled. Note that users of
|
||||
hw wallets still need to set udev rules themselves.
|
||||
* hw wallets: fix a regression during transaction signing that prompts
|
||||
the user too many times for confirmations (commit 2729909)
|
||||
* transactions now set nVersion to 2, to mimic Bitcoin Core
|
||||
* fix Qt bug that made all hw wallets unusable on Windows 8.1 (#4960)
|
||||
* fix bugs in wallet creation wizard that resulted in corrupted
|
||||
wallets being created in rare cases (#5082, #5057)
|
||||
* fix compatibility with Qt 5.12 (#5109)
|
||||
|
||||
|
||||
# Release 3.3.3 - (January 25, 2019)
|
||||
|
||||
* Do not expose users to server error messages (#4968)
|
||||
* Notify users of new releases. Release announcements must be signed,
|
||||
and they are verified byElectrum using a hardcoded Bitcoin address.
|
||||
* Hardware wallet fixes (#4991, #4993, #5006)
|
||||
* Display only QR code in QRcode Window
|
||||
* Fixed code signing on MacOS
|
||||
* Randomise locktime of transactions
|
||||
|
||||
|
||||
# Release 3.3.2 - (December 21, 2018)
|
||||
|
||||
* Fix Qt history export bug
|
||||
* Improve network timeouts
|
||||
* Prepend server transaction_broadcast error messages with
|
||||
explanatory message. Render error messages as plain text.
|
||||
|
||||
|
||||
# Release 3.3.1 - (December 20, 2018)
|
||||
|
||||
* Qt: Fix invoices tab crash (#4941)
|
||||
* Android: Minor GUI improvements
|
||||
|
||||
|
||||
# Release 3.3.0 - Hodler's Edition (December 19, 2018)
|
||||
|
||||
* The network layer has been rewritten using asyncio and aiorpcx.
|
||||
In addition to easier maintenance, this makes the client
|
||||
more robust against misbehaving servers.
|
||||
* The minimum python version was increased to 3.6
|
||||
* The blockchain headers and fork handling logic has been generalized.
|
||||
Clients by default now follow chain based on most work, not length.
|
||||
* New wallet creation defaults to native segwit (bech32).
|
||||
* Segwit 2FA: TrustedCoin now supports native segwit p2wsh
|
||||
two-factor wallets.
|
||||
* RBF batching (opt-in): If the wallet has an unconfirmed RBF
|
||||
transaction, new payments will be added to that transaction,
|
||||
instead of creating new transactions.
|
||||
* MacOS: support QR code scanner in binaries.
|
||||
* Android APK:
|
||||
- build using Google NDK instead of Crystax NDK
|
||||
- target API 28
|
||||
- do not use external storage (previously for block headers)
|
||||
* hardware wallets:
|
||||
- Coldcard now supports spending from p2wpkh-p2sh,
|
||||
fixed p2pkh signing for fw 1.1.0
|
||||
- Archos Safe-T mini: fix #4726 signing issue
|
||||
- KeepKey: full segwit support
|
||||
- Trezor: refactoring and compat with python-trezor 0.11
|
||||
- Digital BitBox: support firmware v5.0.0
|
||||
* fix bitcoin URI handling when app already running (#4796)
|
||||
* Qt listings rewritten:
|
||||
the History tab now uses QAbstractItemModel, the other tabs use
|
||||
QStandardItemModel. Performance should be better for large wallets.
|
||||
* Several other minor bugfixes and usability improvements.
|
||||
|
||||
|
||||
# Release 3.2.3 - (September 3, 2018)
|
||||
|
||||
* hardware wallet: the Safe-T mini from Archos is now supported.
|
||||
* hardware wallet: the Coldcard from Coinkite is now supported.
|
||||
* BIP39 seeds: if a seed extension (aka passphrase) contained
|
||||
multiple consecutive whitespaces or leading/trailing whitespaces
|
||||
then the derived addresses were not following spec. This has been
|
||||
fixed, and affected should move their coins. The wizard will show a
|
||||
warning in this case. (#4566)
|
||||
* Revealer: the PRNG used has been changed (#4649)
|
||||
* fix Linux distributables: 'typing' was not bundled, needed for python 3.4
|
||||
* fix #4626: fix spending from segwit multisig wallets involving a Trezor
|
||||
cosigner when using a custom derivation path
|
||||
* fix #4491: on Android, if user had set "uBTC" as base unit, app crashed
|
||||
* fix #4497: on Android, paying bip70 invoices from cold start did not work
|
||||
* Several other minor bugfixes and usability improvements.
|
||||
|
||||
|
||||
# Release 3.2.2 - (July 2nd, 2018)
|
||||
|
||||
* Fix DNS resolution on Windows
|
||||
@ -297,7 +205,7 @@ issue #3374. Users should upgrade to 3.0.5.
|
||||
* Qt GUI: sweeping now uses the Send tab, allowing fees to be set
|
||||
* Windows: if using the installer binary, there is now a separate shortcut
|
||||
for "Electrum Testnet"
|
||||
* Digital Bitbox: added support for p2sh-segwit
|
||||
* Digital Bitbox: added suport for p2sh-segwit
|
||||
* OS notifications for incoming transactions
|
||||
* better transaction size estimation:
|
||||
- fees for segwit txns were somewhat underestimated (#3347)
|
||||
@ -525,7 +433,7 @@ issue #3374. Users should upgrade to 3.0.5.
|
||||
|
||||
# Release 2.7.7
|
||||
* Fix utf8 encoding bug with old wallet seeds (issue #1967)
|
||||
* Fix delete request from menu (issue #1968)
|
||||
* Fix delete request from menu (isue #1968)
|
||||
|
||||
# Release 2.7.6
|
||||
* Fixes a critical bug with imported private keys (issue #1966). Keys
|
||||
@ -888,7 +796,7 @@ issue #3374. Users should upgrade to 3.0.5.
|
||||
* New 'Receive' tab in the GUI:
|
||||
- create and manage payment requests, with QR Codes
|
||||
- the former 'Receive' tab was renamed to 'Addresses'
|
||||
- the former Point of Sale plugin is replaced by a resizable
|
||||
- the former Point of Sale plugin is replaced by a resizeable
|
||||
window that pops up if you click on the QR code
|
||||
|
||||
* The 'Send' tab in the Qt GUI supports transactions with multiple
|
||||
@ -911,7 +819,7 @@ issue #3374. Users should upgrade to 3.0.5.
|
||||
|
||||
* The client accepts servers with a CA-signed SSL certificate.
|
||||
|
||||
* ECIES encrypt/decrypt methods, available in the GUI and using
|
||||
* ECIES encrypt/decrypt methods, availabe in the GUI and using
|
||||
the command line:
|
||||
encrypt <pubkey> <message>
|
||||
decrypt <pubkey> <message>
|
||||
@ -984,7 +892,7 @@ bugfixes: connection problems, transactions staying unverified
|
||||
|
||||
# Release 1.8.1
|
||||
|
||||
* Notification option when receiving new transactions
|
||||
* Notification option when receiving new tranactions
|
||||
* Confirm dialogue before sending large amounts
|
||||
* Alternative datafile location for non-windows systems
|
||||
* Fix offline wallet creation
|
||||
|
||||
@ -1,20 +0,0 @@
|
||||
Source tarballs
|
||||
===============
|
||||
|
||||
1. Build locale files
|
||||
|
||||
```
|
||||
contrib/make_locale
|
||||
```
|
||||
|
||||
2. Prepare python dependencies used by Electrum.
|
||||
|
||||
```
|
||||
contrib/make_packages
|
||||
```
|
||||
|
||||
3. Create source tarball.
|
||||
|
||||
```
|
||||
contrib/make_tgz
|
||||
```
|
||||
@ -1,25 +0,0 @@
|
||||
FROM ubuntu:14.04@sha256:cac55e5d97fad634d954d00a5c2a56d80576a08dcc01036011f26b88263f1578
|
||||
|
||||
ENV LC_ALL=C.UTF-8 LANG=C.UTF-8
|
||||
|
||||
RUN apt-get update -q && \
|
||||
apt-get install -qy \
|
||||
git \
|
||||
wget \
|
||||
make \
|
||||
autotools-dev \
|
||||
autoconf \
|
||||
libtool \
|
||||
xz-utils \
|
||||
libssl-dev \
|
||||
zlib1g-dev \
|
||||
libffi6 \
|
||||
libffi-dev \
|
||||
libusb-1.0-0-dev \
|
||||
libudev-dev \
|
||||
gettext \
|
||||
libzbar0 \
|
||||
&& \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
apt-get autoremove -y && \
|
||||
apt-get clean
|
||||
@ -1,41 +0,0 @@
|
||||
AppImage binary for Electrum
|
||||
============================
|
||||
|
||||
This assumes an Ubuntu host, but it should not be too hard to adapt to another
|
||||
similar system. The docker commands should be executed in the project's root
|
||||
folder.
|
||||
|
||||
1. Install Docker
|
||||
|
||||
```
|
||||
$ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
|
||||
$ sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
|
||||
$ sudo apt-get update
|
||||
$ sudo apt-get install -y docker-ce
|
||||
```
|
||||
|
||||
2. Build image
|
||||
|
||||
```
|
||||
$ sudo docker build --no-cache -t electrum-appimage-builder-img contrib/build-linux/appimage
|
||||
```
|
||||
|
||||
3. Build binary
|
||||
|
||||
```
|
||||
$ sudo docker run -it \
|
||||
--name electrum-appimage-builder-cont \
|
||||
-v $PWD:/opt/electrum \
|
||||
--rm \
|
||||
--workdir /opt/electrum/contrib/build-linux/appimage \
|
||||
electrum-appimage-builder-img \
|
||||
./build.sh
|
||||
```
|
||||
|
||||
4. The generated binary is in `./dist`.
|
||||
|
||||
|
||||
## FAQ
|
||||
|
||||
### How can I see what is included in the AppImage?
|
||||
Execute the binary as follows: `./electrum*.AppImage --appimage-extract`
|
||||
@ -1,11 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
APPDIR="$(dirname "$(readlink -e "$0")")"
|
||||
|
||||
export LD_LIBRARY_PATH="${APPDIR}/usr/lib/:${APPDIR}/usr/lib/x86_64-linux-gnu${LD_LIBRARY_PATH+:$LD_LIBRARY_PATH}"
|
||||
export PATH="${APPDIR}/usr/bin:${PATH}"
|
||||
export LDFLAGS="-L${APPDIR}/usr/lib/x86_64-linux-gnu -L${APPDIR}/usr/lib"
|
||||
|
||||
exec "${APPDIR}/usr/bin/python3.6" -s "${APPDIR}/usr/bin/electrum" "$@"
|
||||
@ -1,197 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
PROJECT_ROOT="$(dirname "$(readlink -e "$0")")/../../.."
|
||||
CONTRIB="$PROJECT_ROOT/contrib"
|
||||
DISTDIR="$PROJECT_ROOT/dist"
|
||||
BUILDDIR="$CONTRIB/build-linux/appimage/build/appimage"
|
||||
APPDIR="$BUILDDIR/electrum.AppDir"
|
||||
CACHEDIR="$CONTRIB/build-linux/appimage/.cache/appimage"
|
||||
|
||||
# pinned versions
|
||||
PYTHON_VERSION=3.6.8
|
||||
PKG2APPIMAGE_COMMIT="83483c2971fcaa1cb0c1253acd6c731ef8404381"
|
||||
LIBSECP_VERSION="b408c6a8b287003d1ade5709e6f7bc3c7f1d5be7"
|
||||
|
||||
|
||||
VERSION=`git describe --tags --dirty --always`
|
||||
APPIMAGE="$DISTDIR/electrum-$VERSION-x86_64.AppImage"
|
||||
|
||||
rm -rf "$BUILDDIR"
|
||||
mkdir -p "$APPDIR" "$CACHEDIR" "$DISTDIR"
|
||||
|
||||
|
||||
. "$CONTRIB"/build_tools_util.sh
|
||||
|
||||
|
||||
info "downloading some dependencies."
|
||||
download_if_not_exist "$CACHEDIR/functions.sh" "https://raw.githubusercontent.com/AppImage/pkg2appimage/$PKG2APPIMAGE_COMMIT/functions.sh"
|
||||
verify_hash "$CACHEDIR/functions.sh" "a73a21a6c1d1e15c0a9f47f017ae833873d1dc6aa74a4c840c0b901bf1dcf09c"
|
||||
|
||||
download_if_not_exist "$CACHEDIR/appimagetool" "https://github.com/probonopd/AppImageKit/releases/download/11/appimagetool-x86_64.AppImage"
|
||||
verify_hash "$CACHEDIR/appimagetool" "c13026b9ebaa20a17e7e0a4c818a901f0faba759801d8ceab3bb6007dde00372"
|
||||
|
||||
download_if_not_exist "$CACHEDIR/Python-$PYTHON_VERSION.tar.xz" "https://www.python.org/ftp/python/$PYTHON_VERSION/Python-$PYTHON_VERSION.tar.xz"
|
||||
verify_hash "$CACHEDIR/Python-$PYTHON_VERSION.tar.xz" "35446241e995773b1bed7d196f4b624dadcadc8429f26282e756b2fb8a351193"
|
||||
|
||||
|
||||
|
||||
info "building python."
|
||||
tar xf "$CACHEDIR/Python-$PYTHON_VERSION.tar.xz" -C "$BUILDDIR"
|
||||
(
|
||||
cd "$BUILDDIR/Python-$PYTHON_VERSION"
|
||||
export SOURCE_DATE_EPOCH=1530212462
|
||||
./configure \
|
||||
--cache-file="$CACHEDIR/python.config.cache" \
|
||||
--prefix="$APPDIR/usr" \
|
||||
--enable-ipv6 \
|
||||
--enable-shared \
|
||||
--with-threads \
|
||||
-q
|
||||
make -s
|
||||
make -s install > /dev/null
|
||||
)
|
||||
|
||||
|
||||
info "building libsecp256k1."
|
||||
(
|
||||
git clone https://github.com/bitcoin-core/secp256k1 "$CACHEDIR"/secp256k1 || (cd "$CACHEDIR"/secp256k1 && git pull)
|
||||
cd "$CACHEDIR"/secp256k1
|
||||
git reset --hard "$LIBSECP_VERSION"
|
||||
git clean -f -x -q
|
||||
export SOURCE_DATE_EPOCH=1530212462
|
||||
./autogen.sh
|
||||
echo "LDFLAGS = -no-undefined" >> Makefile.am
|
||||
./configure \
|
||||
--prefix="$APPDIR/usr" \
|
||||
--enable-module-recovery \
|
||||
--enable-experimental \
|
||||
--enable-module-ecdh \
|
||||
--disable-jni \
|
||||
-q
|
||||
make -s
|
||||
make -s install > /dev/null
|
||||
)
|
||||
|
||||
|
||||
appdir_python() {
|
||||
env \
|
||||
PYTHONNOUSERSITE=1 \
|
||||
LD_LIBRARY_PATH="$APPDIR/usr/lib:$APPDIR/usr/lib/x86_64-linux-gnu${LD_LIBRARY_PATH+:$LD_LIBRARY_PATH}" \
|
||||
"$APPDIR/usr/bin/python3.6" "$@"
|
||||
}
|
||||
|
||||
python='appdir_python'
|
||||
|
||||
|
||||
info "installing pip."
|
||||
"$python" -m ensurepip
|
||||
|
||||
|
||||
info "preparing electrum-locale."
|
||||
(
|
||||
cd "$PROJECT_ROOT"
|
||||
git submodule update --init
|
||||
|
||||
pushd "$CONTRIB"/deterministic-build/electrum-locale
|
||||
if ! which msgfmt > /dev/null 2>&1; then
|
||||
echo "Please install gettext"
|
||||
exit 1
|
||||
fi
|
||||
for i in ./locale/*; do
|
||||
dir="$PROJECT_ROOT/electrum/$i/LC_MESSAGES"
|
||||
mkdir -p $dir
|
||||
msgfmt --output-file="$dir/electrum.mo" "$i/electrum.po" || true
|
||||
done
|
||||
popd
|
||||
)
|
||||
|
||||
|
||||
info "installing electrum and its dependencies."
|
||||
mkdir -p "$CACHEDIR/pip_cache"
|
||||
"$python" -m pip install --cache-dir "$CACHEDIR/pip_cache" -r "$CONTRIB/deterministic-build/requirements.txt"
|
||||
"$python" -m pip install --cache-dir "$CACHEDIR/pip_cache" -r "$CONTRIB/deterministic-build/requirements-binaries.txt"
|
||||
"$python" -m pip install --cache-dir "$CACHEDIR/pip_cache" -r "$CONTRIB/deterministic-build/requirements-hw.txt"
|
||||
"$python" -m pip install --cache-dir "$CACHEDIR/pip_cache" "$PROJECT_ROOT"
|
||||
|
||||
|
||||
info "copying zbar"
|
||||
cp "/usr/lib/libzbar.so.0" "$APPDIR/usr/lib/libzbar.so.0"
|
||||
|
||||
|
||||
info "desktop integration."
|
||||
cp "$PROJECT_ROOT/electrum.desktop" "$APPDIR/electrum.desktop"
|
||||
cp "$PROJECT_ROOT/electrum/gui/icons/electrum.png" "$APPDIR/electrum.png"
|
||||
|
||||
|
||||
# add launcher
|
||||
cp "$CONTRIB/build-linux/appimage/apprun.sh" "$APPDIR/AppRun"
|
||||
|
||||
info "finalizing AppDir."
|
||||
(
|
||||
export PKG2AICOMMIT="$PKG2APPIMAGE_COMMIT"
|
||||
. "$CACHEDIR/functions.sh"
|
||||
|
||||
cd "$APPDIR"
|
||||
# copy system dependencies
|
||||
# note: temporarily move PyQt5 out of the way so
|
||||
# we don't try to bundle its system dependencies.
|
||||
mv "$APPDIR/usr/lib/python3.6/site-packages/PyQt5" "$BUILDDIR"
|
||||
copy_deps; copy_deps; copy_deps
|
||||
move_lib
|
||||
mv "$BUILDDIR/PyQt5" "$APPDIR/usr/lib/python3.6/site-packages"
|
||||
|
||||
# apply global appimage blacklist to exclude stuff
|
||||
# move usr/include out of the way to preserve usr/include/python3.6m.
|
||||
mv usr/include usr/include.tmp
|
||||
delete_blacklisted
|
||||
mv usr/include.tmp usr/include
|
||||
)
|
||||
|
||||
|
||||
info "stripping binaries from debug symbols."
|
||||
strip_binaries()
|
||||
{
|
||||
chmod u+w -R "$APPDIR"
|
||||
{
|
||||
printf '%s\0' "$APPDIR/usr/bin/python3.6"
|
||||
find "$APPDIR" -type f -regex '.*\.so\(\.[0-9.]+\)?$' -print0
|
||||
} | xargs -0 --no-run-if-empty --verbose -n1 strip
|
||||
}
|
||||
strip_binaries
|
||||
|
||||
remove_emptydirs()
|
||||
{
|
||||
find "$APPDIR" -type d -empty -print0 | xargs -0 --no-run-if-empty rmdir -vp --ignore-fail-on-non-empty
|
||||
}
|
||||
remove_emptydirs
|
||||
|
||||
|
||||
info "removing some unneeded stuff to decrease binary size."
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/test
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/config-3.6m-x86_64-linux-gnu
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/translations/qtwebengine_locales
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/resources/qtwebengine_*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/qml
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Web*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Designer*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Qml*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Quick*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Location*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Test*
|
||||
rm -rf "$APPDIR"/usr/lib/python3.6/site-packages/PyQt5/Qt/lib/libQt5Xml*
|
||||
|
||||
|
||||
info "creating the AppImage."
|
||||
(
|
||||
cd "$BUILDDIR"
|
||||
chmod +x "$CACHEDIR/appimagetool"
|
||||
"$CACHEDIR/appimagetool" --appimage-extract
|
||||
env VERSION="$VERSION" ./squashfs-root/AppRun --no-appstream --verbose "$APPDIR" "$APPIMAGE"
|
||||
)
|
||||
|
||||
|
||||
info "done."
|
||||
ls -la "$DISTDIR"
|
||||
sha256sum "$DISTDIR"/*
|
||||
36
contrib/build-osx/README.md
Normal file
@ -0,0 +1,36 @@
|
||||
Building Mac OS binaries
|
||||
========================
|
||||
|
||||
This guide explains how to build Electrum binaries for macOS systems.
|
||||
|
||||
The build process consists of two steps:
|
||||
|
||||
## 1. Building the binary
|
||||
|
||||
This needs to be done on a system running macOS or OS X. We use El Capitan (10.11.6) as building it on High Sierra
|
||||
makes the binaries incompatible with older versions.
|
||||
|
||||
Before starting, make sure that the Xcode command line tools are installed (e.g. you have `git`).
|
||||
|
||||
|
||||
cd electrum
|
||||
./contrib/build-osx/make_osx
|
||||
|
||||
This creates a folder named Electrum.app.
|
||||
|
||||
## 2. Building the image
|
||||
The usual way to distribute macOS applications is to use image files containing the
|
||||
application. Although these images can be created on a Mac with the built-in `hdiutil`,
|
||||
they are not deterministic.
|
||||
|
||||
Instead, we use the toolchain that Bitcoin uses: genisoimage and libdmg-hfsplus.
|
||||
These tools do not work on macOS, so you need a separate Linux machine (or VM).
|
||||
|
||||
Copy the Electrum.app directory over and install the dependencies, e.g.:
|
||||
|
||||
apt install libcap-dev cmake make gcc faketime
|
||||
|
||||
Then you can just invoke `package.sh` with the path to the app:
|
||||
|
||||
cd electrum
|
||||
./contrib/build-osx/package.sh ~/Electrum.app/
|
||||
12
contrib/build-osx/base.sh
Normal file
@ -0,0 +1,12 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
RED='\033[0;31m'
|
||||
BLUE='\033[0,34m'
|
||||
NC='\033[0m' # No Color
|
||||
function info {
|
||||
printf "\r💬 ${BLUE}INFO:${NC} ${1}\n"
|
||||
}
|
||||
function fail {
|
||||
printf "\r🗯 ${RED}ERROR:${NC} ${1}\n"
|
||||
exit 1
|
||||
}
|
||||
94
contrib/build-osx/make_osx
Executable file
@ -0,0 +1,94 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Parameterize
|
||||
PYTHON_VERSION=3.6.4
|
||||
BUILDDIR=/tmp/electrum-build
|
||||
PACKAGE=Electrum
|
||||
GIT_REPO=https://github.com/spesmilo/electrum
|
||||
LIBSECP_VERSION=452d8e4d2a2f9f1b5be6b02e18f1ba102e5ca0b4
|
||||
|
||||
. $(dirname "$0")/base.sh
|
||||
|
||||
src_dir=$(dirname "$0")
|
||||
cd $src_dir/../..
|
||||
|
||||
export PYTHONHASHSEED=22
|
||||
VERSION=`git describe --tags --dirty`
|
||||
|
||||
which brew > /dev/null 2>&1 || fail "Please install brew from https://brew.sh/ to continue"
|
||||
|
||||
info "Installing Python $PYTHON_VERSION"
|
||||
export PATH="~/.pyenv/bin:~/.pyenv/shims:~/Library/Python/3.6/bin:$PATH"
|
||||
if [ -d "~/.pyenv" ]; then
|
||||
pyenv update
|
||||
else
|
||||
curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer | bash > /dev/null 2>&1
|
||||
fi
|
||||
PYTHON_CONFIGURE_OPTS="--enable-framework" pyenv install -s $PYTHON_VERSION && \
|
||||
pyenv global $PYTHON_VERSION || \
|
||||
fail "Unable to use Python $PYTHON_VERSION"
|
||||
|
||||
|
||||
info "Installing pyinstaller"
|
||||
python3 -m pip install git+https://github.com/ecdsa/pyinstaller@fix_2952 -I --user || fail "Could not install pyinstaller"
|
||||
|
||||
info "Using these versions for building $PACKAGE:"
|
||||
sw_vers
|
||||
python3 --version
|
||||
echo -n "Pyinstaller "
|
||||
pyinstaller --version
|
||||
|
||||
rm -rf ./dist
|
||||
|
||||
git submodule init
|
||||
git submodule update
|
||||
|
||||
rm -rf $BUILDDIR > /dev/null 2>&1
|
||||
mkdir $BUILDDIR
|
||||
|
||||
cp -R ./contrib/deterministic-build/electrum-locale/locale/ ./lib/locale/
|
||||
cp ./contrib/deterministic-build/electrum-icons/icons_rc.py ./gui/qt/
|
||||
|
||||
|
||||
info "Downloading libusb..."
|
||||
curl https://homebrew.bintray.com/bottles/libusb-1.0.22.el_capitan.bottle.tar.gz | \
|
||||
tar xz --directory $BUILDDIR
|
||||
cp $BUILDDIR/libusb/1.0.22/lib/libusb-1.0.dylib contrib/build-osx
|
||||
|
||||
info "Building libsecp256k1"
|
||||
brew install autoconf automake libtool
|
||||
git clone https://github.com/bitcoin-core/secp256k1 $BUILDDIR/secp256k1
|
||||
pushd $BUILDDIR/secp256k1
|
||||
git reset --hard $LIBSECP_VERSION
|
||||
git clean -f -x -q
|
||||
./autogen.sh
|
||||
./configure --enable-module-recovery --enable-experimental --enable-module-ecdh --disable-jni
|
||||
make
|
||||
popd
|
||||
cp $BUILDDIR/secp256k1/.libs/libsecp256k1.0.dylib contrib/build-osx
|
||||
|
||||
|
||||
info "Installing requirements..."
|
||||
python3 -m pip install -Ir ./contrib/deterministic-build/requirements.txt --user && \
|
||||
python3 -m pip install -Ir ./contrib/deterministic-build/requirements-binaries.txt --user || \
|
||||
fail "Could not install requirements"
|
||||
|
||||
info "Installing hardware wallet requirements..."
|
||||
python3 -m pip install -Ir ./contrib/deterministic-build/requirements-hw.txt --user || \
|
||||
fail "Could not install hardware wallet requirements"
|
||||
|
||||
info "Building $PACKAGE..."
|
||||
python3 setup.py install --user > /dev/null || fail "Could not build $PACKAGE"
|
||||
|
||||
info "Faking timestamps..."
|
||||
for d in ~/Library/Python/ ~/.pyenv .; do
|
||||
pushd $d
|
||||
find . -exec touch -t '200101220000' {} +
|
||||
popd
|
||||
done
|
||||
|
||||
info "Building binary"
|
||||
pyinstaller --noconfirm --ascii --clean --name $VERSION contrib/build-osx/osx.spec || fail "Could not build binary"
|
||||
|
||||
info "Creating .DMG"
|
||||
hdiutil create -fs HFS+ -volname $PACKAGE -srcfolder dist/$PACKAGE.app dist/electrum-$VERSION.dmg || fail "Could not create .DMG"
|
||||
97
contrib/build-osx/osx.spec
Normal file
@ -0,0 +1,97 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
from PyInstaller.utils.hooks import collect_data_files, collect_submodules, collect_dynamic_libs
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
PACKAGE='Electrum'
|
||||
PYPKG='electrum'
|
||||
MAIN_SCRIPT='electrum'
|
||||
ICONS_FILE='electrum.icns'
|
||||
|
||||
for i, x in enumerate(sys.argv):
|
||||
if x == '--name':
|
||||
VERSION = sys.argv[i+1]
|
||||
break
|
||||
else:
|
||||
raise Exception('no version')
|
||||
|
||||
electrum = os.path.abspath(".") + "/"
|
||||
block_cipher = None
|
||||
|
||||
# see https://github.com/pyinstaller/pyinstaller/issues/2005
|
||||
hiddenimports = []
|
||||
hiddenimports += collect_submodules('trezorlib')
|
||||
hiddenimports += collect_submodules('btchip')
|
||||
hiddenimports += collect_submodules('keepkeylib')
|
||||
hiddenimports += collect_submodules('websocket')
|
||||
|
||||
datas = [
|
||||
(electrum+'lib/*.json', PYPKG),
|
||||
(electrum+'lib/wordlist/english.txt', PYPKG + '/wordlist'),
|
||||
(electrum+'lib/locale', PYPKG + '/locale'),
|
||||
(electrum+'plugins', PYPKG + '_plugins'),
|
||||
]
|
||||
datas += collect_data_files('trezorlib')
|
||||
datas += collect_data_files('btchip')
|
||||
datas += collect_data_files('keepkeylib')
|
||||
|
||||
# Add libusb so Trezor will work
|
||||
binaries = [(electrum + "contrib/build-osx/libusb-1.0.dylib", ".")]
|
||||
binaries += [(electrum + "contrib/build-osx/libsecp256k1.0.dylib", ".")]
|
||||
|
||||
# Workaround for "Retro Look":
|
||||
binaries += [b for b in collect_dynamic_libs('PyQt5') if 'macstyle' in b[0]]
|
||||
|
||||
# We don't put these files in to actually include them in the script but to make the Analysis method scan them for imports
|
||||
a = Analysis([electrum+MAIN_SCRIPT,
|
||||
electrum+'gui/qt/main_window.py',
|
||||
electrum+'gui/text.py',
|
||||
electrum+'lib/util.py',
|
||||
electrum+'lib/wallet.py',
|
||||
electrum+'lib/simple_config.py',
|
||||
electrum+'lib/bitcoin.py',
|
||||
electrum+'lib/dnssec.py',
|
||||
electrum+'lib/commands.py',
|
||||
electrum+'plugins/cosigner_pool/qt.py',
|
||||
electrum+'plugins/email_requests/qt.py',
|
||||
electrum+'plugins/trezor/client.py',
|
||||
electrum+'plugins/trezor/qt.py',
|
||||
electrum+'plugins/keepkey/qt.py',
|
||||
electrum+'plugins/ledger/qt.py',
|
||||
],
|
||||
binaries=binaries,
|
||||
datas=datas,
|
||||
hiddenimports=hiddenimports,
|
||||
hookspath=[])
|
||||
|
||||
# http://stackoverflow.com/questions/19055089/pyinstaller-onefile-warning-pyconfig-h-when-importing-scipy-or-scipy-signal
|
||||
for d in a.datas:
|
||||
if 'pyconfig' in d[0]:
|
||||
a.datas.remove(d)
|
||||
break
|
||||
|
||||
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
|
||||
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.datas,
|
||||
name=PACKAGE,
|
||||
debug=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
icon=electrum+ICONS_FILE,
|
||||
console=False)
|
||||
|
||||
app = BUNDLE(exe,
|
||||
version = VERSION,
|
||||
name=PACKAGE + '.app',
|
||||
icon=electrum+ICONS_FILE,
|
||||
bundle_identifier=None,
|
||||
info_plist={
|
||||
'NSHighResolutionCapable': 'True',
|
||||
'NSSupportsAutomaticGraphicsSwitching': 'True'
|
||||
}
|
||||
)
|
||||
@ -85,4 +85,4 @@ dmg dmg Electrum_uncompressed.dmg electrum-$VERSION.dmg || fail "Unable to creat
|
||||
rm Electrum_uncompressed.dmg
|
||||
|
||||
echo "Done."
|
||||
sha256sum electrum-$VERSION.dmg
|
||||
md5sum electrum-$VERSION.dmg
|
||||
@ -2,8 +2,7 @@ Windows Binary Builds
|
||||
=====================
|
||||
|
||||
These scripts can be used for cross-compilation of Windows Electrum executables from Linux/Wine.
|
||||
|
||||
For reproducible builds, see the `docker` folder.
|
||||
Produced binaries are deterministic, so you should be able to generate binaries that match the official releases.
|
||||
|
||||
|
||||
Usage:
|
||||
@ -35,3 +34,49 @@ The binaries are also built by Travis CI, so if you are having problems,
|
||||
2. Make sure `/opt` is writable by the current user.
|
||||
3. Run `build.sh`.
|
||||
4. The generated binaries are in `./dist`.
|
||||
|
||||
|
||||
Code Signing
|
||||
============
|
||||
|
||||
Electrum Windows builds are signed with a Microsoft Authenticode™ code signing
|
||||
certificate in addition to the GPG-based signatures.
|
||||
|
||||
The advantage of using Authenticode is that Electrum users won't receive a
|
||||
Windows SmartScreen warning when starting it.
|
||||
|
||||
The release signing procedure involves a signer (the holder of the
|
||||
certificate/key) and one or multiple trusted verifiers:
|
||||
|
||||
|
||||
| Signer | Verifier |
|
||||
|-----------------------------------------------------------|-----------------------------------|
|
||||
| Build .exe files using `build.sh` | |
|
||||
| Sign .exe with `./sign.sh` | |
|
||||
| Upload signed files to download server | |
|
||||
| | Build .exe files using `build.sh` |
|
||||
| | Compare files using `unsign.sh` |
|
||||
| | Sign .exe file using `gpg -b` |
|
||||
|
||||
| Signer and verifiers:
|
||||
| Upload signatures to 'electrum-signatures' repo, as `$version/$filename.$builder.asc` |
|
||||
|
||||
|
||||
|
||||
|
||||
Verify Integrity of signed binary
|
||||
=================================
|
||||
|
||||
Every user can verify that the official binary was created from the source code in this
|
||||
repository. To do so, the Authenticode signature needs to be stripped since the signature
|
||||
is not reproducible.
|
||||
|
||||
This procedure removes the differences between the signed and unsigned binary:
|
||||
|
||||
1. Remove the signature from the signed binary using osslsigncode or signtool.
|
||||
2. Set the COFF image checksum for the signed binary to 0x0. This is necessary
|
||||
because pyinstaller doesn't generate a checksum.
|
||||
3. Append null bytes to the _unsigned_ binary until the byte count is a multiple
|
||||
of 8.
|
||||
|
||||
The script `unsign.sh` performs these steps.
|
||||
|
||||
@ -1,13 +1,14 @@
|
||||
#!/bin/bash
|
||||
|
||||
NAME_ROOT=electrum
|
||||
PYTHON_VERSION=3.5.4
|
||||
|
||||
# These settings probably don't need any change
|
||||
export WINEPREFIX=/opt/wine64
|
||||
export PYTHONDONTWRITEBYTECODE=1
|
||||
export PYTHONHASHSEED=22
|
||||
|
||||
PYHOME=c:/python3
|
||||
PYHOME=c:/python$PYTHON_VERSION
|
||||
PYTHON="wine $PYHOME/python.exe -OO -B"
|
||||
|
||||
|
||||
@ -18,13 +19,29 @@ set -e
|
||||
mkdir -p tmp
|
||||
cd tmp
|
||||
|
||||
pushd $WINEPREFIX/drive_c/electrum
|
||||
if [ -d ./electrum ]; then
|
||||
rm ./electrum -rf
|
||||
fi
|
||||
|
||||
# Load electrum-locale for this release
|
||||
git clone https://github.com/spesmilo/electrum -b master
|
||||
|
||||
pushd electrum
|
||||
if [ ! -z "$1" ]; then
|
||||
# a commit/tag/branch was specified
|
||||
if ! git cat-file -e "$1" 2> /dev/null
|
||||
then # can't find target
|
||||
# try pull requests
|
||||
git config --local --add remote.origin.fetch '+refs/pull/*/merge:refs/remotes/origin/pr/*'
|
||||
git fetch --all
|
||||
fi
|
||||
git checkout $1
|
||||
fi
|
||||
|
||||
# Load electrum-icons and electrum-locale for this release
|
||||
git submodule init
|
||||
git submodule update
|
||||
|
||||
VERSION=`git describe --tags --dirty --always`
|
||||
VERSION=`git describe --tags --dirty`
|
||||
echo "Last commit: $VERSION"
|
||||
|
||||
pushd ./contrib/deterministic-build/electrum-locale
|
||||
@ -33,7 +50,7 @@ if ! which msgfmt > /dev/null 2>&1; then
|
||||
exit 1
|
||||
fi
|
||||
for i in ./locale/*; do
|
||||
dir=$WINEPREFIX/drive_c/electrum/electrum/$i/LC_MESSAGES
|
||||
dir=$i/LC_MESSAGES
|
||||
mkdir -p $dir
|
||||
msgfmt --output-file=$dir/electrum.mo $i/electrum.po || true
|
||||
done
|
||||
@ -42,7 +59,11 @@ popd
|
||||
find -exec touch -d '2000-11-11T11:11:11+00:00' {} +
|
||||
popd
|
||||
|
||||
cp $WINEPREFIX/drive_c/electrum/LICENCE .
|
||||
rm -rf $WINEPREFIX/drive_c/electrum
|
||||
cp -r electrum $WINEPREFIX/drive_c/electrum
|
||||
cp electrum/LICENCE .
|
||||
cp -r ./electrum/contrib/deterministic-build/electrum-locale/locale $WINEPREFIX/drive_c/electrum/lib/
|
||||
cp ./electrum/contrib/deterministic-build/electrum-icons/icons_rc.py $WINEPREFIX/drive_c/electrum/gui/qt/
|
||||
|
||||
# Install frozen dependencies
|
||||
$PYTHON -m pip install -r ../../deterministic-build/requirements.txt
|
||||
@ -50,7 +71,7 @@ $PYTHON -m pip install -r ../../deterministic-build/requirements.txt
|
||||
$PYTHON -m pip install -r ../../deterministic-build/requirements-hw.txt
|
||||
|
||||
pushd $WINEPREFIX/drive_c/electrum
|
||||
$PYTHON -m pip install .
|
||||
$PYTHON setup.py install
|
||||
popd
|
||||
|
||||
cd ..
|
||||
@ -58,7 +79,7 @@ cd ..
|
||||
rm -rf dist/
|
||||
|
||||
# build standalone and portable versions
|
||||
wine "$PYHOME/scripts/pyinstaller.exe" --noconfirm --ascii --clean --name $NAME_ROOT-$VERSION -w deterministic.spec
|
||||
wine "C:/python$PYTHON_VERSION/scripts/pyinstaller.exe" --noconfirm --ascii --clean --name $NAME_ROOT-$VERSION -w deterministic.spec
|
||||
|
||||
# set timestamps in dist, in order to make the installer reproducible
|
||||
pushd dist
|
||||
@ -74,4 +95,4 @@ mv electrum-setup.exe $NAME_ROOT-$VERSION-setup.exe
|
||||
cd ..
|
||||
|
||||
echo "Done."
|
||||
sha256sum dist/electrum*exe
|
||||
md5sum dist/electrum*exe
|
||||
|
||||
@ -29,8 +29,7 @@ else
|
||||
git pull
|
||||
fi
|
||||
|
||||
LIBSECP_VERSION="b408c6a8b287003d1ade5709e6f7bc3c7f1d5be7"
|
||||
git reset --hard "$LIBSECP_VERSION"
|
||||
git reset --hard 452d8e4d2a2f9f1b5be6b02e18f1ba102e5ca0b4
|
||||
git clean -f -x -q
|
||||
|
||||
build_dll i686-w64-mingw32 # 64-bit would be: x86_64-w64-mingw32
|
||||
|
||||
@ -2,6 +2,10 @@
|
||||
# Lucky number
|
||||
export PYTHONHASHSEED=22
|
||||
|
||||
if [ ! -z "$1" ]; then
|
||||
to_build="$1"
|
||||
fi
|
||||
|
||||
here=$(dirname "$0")
|
||||
test -n "$here" -a -d "$here" || exit
|
||||
|
||||
@ -24,5 +28,5 @@ find -exec touch -d '2000-11-11T11:11:11+00:00' {} +
|
||||
popd
|
||||
ls -l /opt/wine64/drive_c/python*
|
||||
|
||||
$here/build-electrum-git.sh && \
|
||||
$here/build-electrum-git.sh $to_build && \
|
||||
echo "Done."
|
||||
|
||||
@ -10,23 +10,17 @@ for i, x in enumerate(sys.argv):
|
||||
else:
|
||||
raise Exception('no name')
|
||||
|
||||
PYHOME = 'c:/python3'
|
||||
PYTHON_VERSION = '3.5.4'
|
||||
PYHOME = 'c:/python' + PYTHON_VERSION
|
||||
|
||||
home = 'C:\\electrum\\'
|
||||
|
||||
# see https://github.com/pyinstaller/pyinstaller/issues/2005
|
||||
hiddenimports = []
|
||||
hiddenimports += collect_submodules('trezorlib')
|
||||
hiddenimports += collect_submodules('safetlib')
|
||||
hiddenimports += collect_submodules('btchip')
|
||||
hiddenimports += collect_submodules('keepkeylib')
|
||||
hiddenimports += collect_submodules('websocket')
|
||||
hiddenimports += collect_submodules('ckcc')
|
||||
|
||||
# safetlib imports PyQt5.Qt. We use a local updated copy of pinmatrix.py until they
|
||||
# release a new version that includes https://github.com/archos-safe-t/python-safet/commit/b1eab3dba4c04fdfc1fcf17b66662c28c5f2380e
|
||||
hiddenimports.remove('safetlib.qt.pinmatrix')
|
||||
|
||||
|
||||
# Add libusb binary
|
||||
binaries = [(PYHOME+"/libusb-1.0.dll", ".")]
|
||||
@ -37,37 +31,32 @@ binaries += [b for b in collect_dynamic_libs('PyQt5') if 'qwindowsvista' in b[0]
|
||||
binaries += [('C:/tmp/libsecp256k1.dll', '.')]
|
||||
|
||||
datas = [
|
||||
(home+'electrum/*.json', 'electrum'),
|
||||
(home+'electrum/wordlist/english.txt', 'electrum/wordlist'),
|
||||
(home+'electrum/locale', 'electrum/locale'),
|
||||
(home+'electrum/plugins', 'electrum/plugins'),
|
||||
('C:\\Program Files (x86)\\ZBar\\bin\\', '.'),
|
||||
(home+'electrum/gui/icons', 'electrum/gui/icons'),
|
||||
(home+'lib/*.json', 'electrum'),
|
||||
(home+'lib/wordlist/english.txt', 'electrum/wordlist'),
|
||||
(home+'lib/locale', 'electrum/locale'),
|
||||
(home+'plugins', 'electrum_plugins'),
|
||||
('C:\\Program Files (x86)\\ZBar\\bin\\', '.')
|
||||
]
|
||||
datas += collect_data_files('trezorlib')
|
||||
datas += collect_data_files('safetlib')
|
||||
datas += collect_data_files('btchip')
|
||||
datas += collect_data_files('keepkeylib')
|
||||
datas += collect_data_files('ckcc')
|
||||
|
||||
# We don't put these files in to actually include them in the script but to make the Analysis method scan them for imports
|
||||
a = Analysis([home+'run_electrum',
|
||||
home+'electrum/gui/qt/main_window.py',
|
||||
home+'electrum/gui/text.py',
|
||||
home+'electrum/util.py',
|
||||
home+'electrum/wallet.py',
|
||||
home+'electrum/simple_config.py',
|
||||
home+'electrum/bitcoin.py',
|
||||
home+'electrum/dnssec.py',
|
||||
home+'electrum/commands.py',
|
||||
home+'electrum/plugins/cosigner_pool/qt.py',
|
||||
home+'electrum/plugins/email_requests/qt.py',
|
||||
home+'electrum/plugins/trezor/qt.py',
|
||||
home+'electrum/plugins/safe_t/client.py',
|
||||
home+'electrum/plugins/safe_t/qt.py',
|
||||
home+'electrum/plugins/keepkey/qt.py',
|
||||
home+'electrum/plugins/ledger/qt.py',
|
||||
home+'electrum/plugins/coldcard/qt.py',
|
||||
a = Analysis([home+'electrum',
|
||||
home+'gui/qt/main_window.py',
|
||||
home+'gui/text.py',
|
||||
home+'lib/util.py',
|
||||
home+'lib/wallet.py',
|
||||
home+'lib/simple_config.py',
|
||||
home+'lib/bitcoin.py',
|
||||
home+'lib/dnssec.py',
|
||||
home+'lib/commands.py',
|
||||
home+'plugins/cosigner_pool/qt.py',
|
||||
home+'plugins/email_requests/qt.py',
|
||||
home+'plugins/trezor/client.py',
|
||||
home+'plugins/trezor/qt.py',
|
||||
home+'plugins/keepkey/qt.py',
|
||||
home+'plugins/ledger/qt.py',
|
||||
#home+'packages/requests/utils.py'
|
||||
],
|
||||
binaries=binaries,
|
||||
@ -79,28 +68,10 @@ a = Analysis([home+'run_electrum',
|
||||
|
||||
# http://stackoverflow.com/questions/19055089/pyinstaller-onefile-warning-pyconfig-h-when-importing-scipy-or-scipy-signal
|
||||
for d in a.datas:
|
||||
if 'pyconfig' in d[0]:
|
||||
if 'pyconfig' in d[0]:
|
||||
a.datas.remove(d)
|
||||
break
|
||||
|
||||
# Strip out parts of Qt that we never use. Reduces binary size by tens of MBs. see #4815
|
||||
qt_bins2remove=('qt5web', 'qt53d', 'qt5game', 'qt5designer', 'qt5quick',
|
||||
'qt5location', 'qt5test', 'qt5xml', r'pyqt5\qt\qml\qtquick')
|
||||
print("Removing Qt binaries:", *qt_bins2remove)
|
||||
for x in a.binaries.copy():
|
||||
for r in qt_bins2remove:
|
||||
if x[0].lower().startswith(r):
|
||||
a.binaries.remove(x)
|
||||
print('----> Removed x =', x)
|
||||
|
||||
qt_data2remove=(r'pyqt5\qt\translations\qtwebengine_locales', )
|
||||
print("Removing Qt datas:", *qt_data2remove)
|
||||
for x in a.datas.copy():
|
||||
for r in qt_data2remove:
|
||||
if x[0].lower().startswith(r):
|
||||
a.datas.remove(x)
|
||||
print('----> Removed x =', x)
|
||||
|
||||
# hotfix for #3171 (pre-Win10 binaries)
|
||||
a.binaries = [x for x in a.binaries if not x[1].lower().startswith(r'c:\windows')]
|
||||
|
||||
@ -114,12 +85,12 @@ exe_standalone = EXE(
|
||||
pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.datas,
|
||||
a.datas,
|
||||
name=os.path.join('build\\pyi.win32\\electrum', cmdline_name + ".exe"),
|
||||
debug=False,
|
||||
strip=None,
|
||||
upx=False,
|
||||
icon=home+'electrum/gui/icons/electrum.ico',
|
||||
icon=home+'icons/electrum.ico',
|
||||
console=False)
|
||||
# console=True makes an annoying black box pop up, but it does make Electrum output command line commands, with this turned off no output will be given but commands can still be used
|
||||
|
||||
@ -132,7 +103,7 @@ exe_portable = EXE(
|
||||
debug=False,
|
||||
strip=None,
|
||||
upx=False,
|
||||
icon=home+'electrum/gui/icons/electrum.ico',
|
||||
icon=home+'icons/electrum.ico',
|
||||
console=False)
|
||||
|
||||
#####
|
||||
@ -146,7 +117,7 @@ exe_dependent = EXE(
|
||||
debug=False,
|
||||
strip=None,
|
||||
upx=False,
|
||||
icon=home+'electrum/gui/icons/electrum.ico',
|
||||
icon=home+'icons/electrum.ico',
|
||||
console=False)
|
||||
|
||||
coll = COLLECT(
|
||||
@ -157,6 +128,6 @@ coll = COLLECT(
|
||||
strip=None,
|
||||
upx=True,
|
||||
debug=False,
|
||||
icon=home+'electrum/gui/icons/electrum.ico',
|
||||
icon=home+'icons/electrum.ico',
|
||||
console=False,
|
||||
name=os.path.join('dist', 'electrum'))
|
||||
|
||||
@ -6,36 +6,28 @@ RUN dpkg --add-architecture i386 && \
|
||||
apt-get update -q && \
|
||||
apt-get install -qy \
|
||||
wget=1.19.4-1ubuntu2.1 \
|
||||
gnupg2=2.2.4-1ubuntu1.2 \
|
||||
dirmngr=2.2.4-1ubuntu1.2 \
|
||||
python3-software-properties=0.96.24.32.1 \
|
||||
software-properties-common=0.96.24.32.1
|
||||
|
||||
RUN apt-get update -q && \
|
||||
apt-get install -qy \
|
||||
git=1:2.17.1-1ubuntu0.4 \
|
||||
gnupg2=2.2.4-1ubuntu1.1 \
|
||||
dirmngr=2.2.4-1ubuntu1.1 \
|
||||
software-properties-common=0.96.24.32.3 \
|
||||
&& \
|
||||
wget -nc https://dl.winehq.org/wine-builds/Release.key && \
|
||||
apt-key add Release.key && \
|
||||
apt-add-repository https://dl.winehq.org/wine-builds/ubuntu/ && \
|
||||
apt-get update -q && \
|
||||
apt-get install -qy \
|
||||
wine-stable-amd64:amd64=3.0.1~bionic \
|
||||
wine-stable-i386:i386=3.0.1~bionic \
|
||||
wine-stable:amd64=3.0.1~bionic \
|
||||
winehq-stable:amd64=3.0.1~bionic \
|
||||
git=1:2.17.1-1ubuntu0.1 \
|
||||
p7zip-full=16.02+dfsg-6 \
|
||||
make=4.1-9.1ubuntu1 \
|
||||
mingw-w64=5.0.3-1 \
|
||||
autotools-dev=20180224.1 \
|
||||
autoconf=2.69-11 \
|
||||
libtool=2.4.6-2 \
|
||||
gettext=0.19.8.1-6
|
||||
|
||||
RUN wget -nc https://dl.winehq.org/wine-builds/Release.key && \
|
||||
echo "c51bcb8cc4a12abfbd7c7660eaf90f49674d15e222c262f27e6c96429111b822 Release.key" | sha256sum -c - && \
|
||||
apt-key add Release.key && \
|
||||
wget -nc https://dl.winehq.org/wine-builds/winehq.key && \
|
||||
echo "78b185fabdb323971d13bd329fefc8038e08559aa51c4996de18db0639a51df6 winehq.key" | sha256sum -c - && \
|
||||
apt-key add winehq.key && \
|
||||
apt-add-repository https://dl.winehq.org/wine-builds/ubuntu/ && \
|
||||
apt-get update -q && \
|
||||
apt-get install -qy \
|
||||
wine-stable-amd64:amd64=4.0~bionic \
|
||||
wine-stable-i386:i386=4.0~bionic \
|
||||
wine-stable:amd64=4.0~bionic \
|
||||
winehq-stable:amd64=4.0~bionic
|
||||
|
||||
RUN rm -rf /var/lib/apt/lists/* && \
|
||||
gettext=0.19.8.1-6 \
|
||||
&& \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
apt-get autoremove -y && \
|
||||
apt-get clean
|
||||
|
||||
@ -1,9 +1,6 @@
|
||||
Deterministic Windows binaries with Docker
|
||||
==========================================
|
||||
|
||||
Produced binaries are deterministic, so you should be able to generate
|
||||
binaries that match the official releases.
|
||||
|
||||
This assumes an Ubuntu host, but it should not be too hard to adapt to another
|
||||
similar system. The docker commands should be executed in the project's root
|
||||
folder.
|
||||
@ -20,84 +17,25 @@ folder.
|
||||
2. Build image
|
||||
|
||||
```
|
||||
$ sudo docker build -t electrum-wine-builder-img contrib/build-wine/docker
|
||||
$ sudo docker build --no-cache -t electrum-wine-builder-img contrib/build-wine/docker
|
||||
```
|
||||
|
||||
Note: see [this](https://stackoverflow.com/a/40516974/7499128) if having dns problems
|
||||
|
||||
3. Build Windows binaries
|
||||
|
||||
It's recommended to build from a fresh clone
|
||||
(but you can skip this if reproducibility is not necessary).
|
||||
|
||||
```
|
||||
$ FRESH_CLONE=contrib/build-wine/fresh_clone && \
|
||||
rm -rf $FRESH_CLONE && \
|
||||
mkdir -p $FRESH_CLONE && \
|
||||
cd $FRESH_CLONE && \
|
||||
git clone https://github.com/spesmilo/electrum.git && \
|
||||
cd electrum
|
||||
```
|
||||
|
||||
And then build from this directory:
|
||||
```
|
||||
$ git checkout $REV
|
||||
$ sudo docker run -it \
|
||||
$ TARGET=master
|
||||
$ sudo docker run \
|
||||
--name electrum-wine-builder-cont \
|
||||
-v $PWD:/opt/wine64/drive_c/electrum \
|
||||
-v .:/opt/electrum \
|
||||
--rm \
|
||||
--workdir /opt/wine64/drive_c/electrum/contrib/build-wine \
|
||||
--workdir /opt/electrum/contrib/build-wine \
|
||||
electrum-wine-builder-img \
|
||||
./build.sh
|
||||
./build.sh $TARGET
|
||||
```
|
||||
4. The generated binaries are in `./contrib/build-wine/dist`.
|
||||
|
||||
|
||||
|
||||
Note: the `setup` binary (NSIS installer) is not deterministic yet.
|
||||
|
||||
|
||||
Code Signing
|
||||
============
|
||||
|
||||
Electrum Windows builds are signed with a Microsoft Authenticode™ code signing
|
||||
certificate in addition to the GPG-based signatures.
|
||||
|
||||
The advantage of using Authenticode is that Electrum users won't receive a
|
||||
Windows SmartScreen warning when starting it.
|
||||
|
||||
The release signing procedure involves a signer (the holder of the
|
||||
certificate/key) and one or multiple trusted verifiers:
|
||||
|
||||
|
||||
| Signer | Verifier |
|
||||
|-----------------------------------------------------------|-----------------------------------|
|
||||
| Build .exe files using `build.sh` | |
|
||||
| Sign .exe with `./sign.sh` | |
|
||||
| Upload signed files to download server | |
|
||||
| | Build .exe files using `build.sh` |
|
||||
| | Compare files using `unsign.sh` |
|
||||
| | Sign .exe file using `gpg -b` |
|
||||
|
||||
| Signer and verifiers: |
|
||||
|-----------------------------------------------------------------------------------------------|
|
||||
| Upload signatures to 'electrum-signatures' repo, as `$version/$filename.$builder.asc` |
|
||||
|
||||
|
||||
|
||||
Verify Integrity of signed binary
|
||||
=================================
|
||||
|
||||
Every user can verify that the official binary was created from the source code in this
|
||||
repository. To do so, the Authenticode signature needs to be stripped since the signature
|
||||
is not reproducible.
|
||||
|
||||
This procedure removes the differences between the signed and unsigned binary:
|
||||
|
||||
1. Remove the signature from the signed binary using osslsigncode or signtool.
|
||||
2. Set the COFF image checksum for the signed binary to 0x0. This is necessary
|
||||
because pyinstaller doesn't generate a checksum.
|
||||
3. Append null bytes to the _unsigned_ binary until the byte count is a multiple
|
||||
of 8.
|
||||
|
||||
The script `unsign.sh` performs these steps.
|
||||
|
||||
@ -58,7 +58,7 @@
|
||||
VIAddVersionKey ProductName "${PRODUCT_NAME} Installer"
|
||||
VIAddVersionKey Comments "The installer for ${PRODUCT_NAME}"
|
||||
VIAddVersionKey CompanyName "${PRODUCT_NAME}"
|
||||
VIAddVersionKey LegalCopyright "2013-2018 ${PRODUCT_PUBLISHER}"
|
||||
VIAddVersionKey LegalCopyright "2013-2016 ${PRODUCT_PUBLISHER}"
|
||||
VIAddVersionKey FileDescription "${PRODUCT_NAME} Installer"
|
||||
VIAddVersionKey FileVersion ${PRODUCT_VERSION}
|
||||
VIAddVersionKey ProductVersion ${PRODUCT_VERSION}
|
||||
@ -72,7 +72,7 @@
|
||||
!define MUI_ABORTWARNING
|
||||
!define MUI_ABORTWARNING_TEXT "Are you sure you wish to abort the installation of ${PRODUCT_NAME}?"
|
||||
|
||||
!define MUI_ICON "c:\electrum\electrum\gui\icons\electrum.ico"
|
||||
!define MUI_ICON "tmp\electrum\icons\electrum.ico"
|
||||
|
||||
;--------------------------------
|
||||
;Pages
|
||||
@ -111,7 +111,7 @@ Section
|
||||
|
||||
;Files to pack into the installer
|
||||
File /r "dist\electrum\*.*"
|
||||
File "c:\electrum\electrum\gui\icons\electrum.ico"
|
||||
File "..\..\icons\electrum.ico"
|
||||
|
||||
;Store installation folder
|
||||
WriteRegStr HKCU "Software\${PRODUCT_NAME}" "" $INSTDIR
|
||||
|
||||
@ -1,9 +1,9 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Please update these carefully, some versions won't work under Wine
|
||||
NSIS_FILENAME=nsis-3.04-setup.exe
|
||||
NSIS_FILENAME=nsis-3.03-setup.exe
|
||||
NSIS_URL=https://prdownloads.sourceforge.net/nsis/$NSIS_FILENAME?download
|
||||
NSIS_SHA256=4e1db5a7400e348b1b46a4a11b6d9557fd84368e4ad3d4bc4c1be636c89638aa
|
||||
NSIS_SHA256=bd3b15ab62ec6b0c7a00f46022d441af03277be893326f6fea8e212dc2d77743
|
||||
|
||||
ZBAR_FILENAME=zbarw-20121031-setup.exe
|
||||
ZBAR_URL=https://sourceforge.net/projects/zbarw/files/$ZBAR_FILENAME/download
|
||||
@ -13,26 +13,79 @@ LIBUSB_FILENAME=libusb-1.0.22.7z
|
||||
LIBUSB_URL=https://prdownloads.sourceforge.net/project/libusb/libusb-1.0/libusb-1.0.22/$LIBUSB_FILENAME?download
|
||||
LIBUSB_SHA256=671f1a420757b4480e7fadc8313d6fb3cbb75ca00934c417c1efa6e77fb8779b
|
||||
|
||||
PYTHON_VERSION=3.6.8
|
||||
PYTHON_VERSION=3.5.4
|
||||
|
||||
## These settings probably don't need change
|
||||
export WINEPREFIX=/opt/wine64
|
||||
#export WINEARCH='win32'
|
||||
|
||||
PYTHON_FOLDER="python3"
|
||||
PYHOME="c:/$PYTHON_FOLDER"
|
||||
PYHOME=c:/python$PYTHON_VERSION
|
||||
PYTHON="wine $PYHOME/python.exe -OO -B"
|
||||
|
||||
|
||||
# based on https://superuser.com/questions/497940/script-to-verify-a-signature-with-gpg
|
||||
verify_signature() {
|
||||
local file=$1 keyring=$2 out=
|
||||
if out=$(gpg --no-default-keyring --keyring "$keyring" --status-fd 1 --verify "$file" 2>/dev/null) &&
|
||||
echo "$out" | grep -qs "^\[GNUPG:\] VALIDSIG "; then
|
||||
return 0
|
||||
else
|
||||
echo "$out" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
verify_hash() {
|
||||
local file=$1 expected_hash=$2
|
||||
actual_hash=$(sha256sum $file | awk '{print $1}')
|
||||
if [ "$actual_hash" == "$expected_hash" ]; then
|
||||
return 0
|
||||
else
|
||||
echo "$file $actual_hash (unexpected hash)" >&2
|
||||
rm "$file"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
download_if_not_exist() {
|
||||
local file_name=$1 url=$2
|
||||
if [ ! -e $file_name ] ; then
|
||||
wget -O $PWD/$file_name "$url"
|
||||
fi
|
||||
}
|
||||
|
||||
# https://github.com/travis-ci/travis-build/blob/master/lib/travis/build/templates/header.sh
|
||||
retry() {
|
||||
local result=0
|
||||
local count=1
|
||||
while [ $count -le 3 ]; do
|
||||
[ $result -ne 0 ] && {
|
||||
echo -e "\nThe command \"$@\" failed. Retrying, $count of 3.\n" >&2
|
||||
}
|
||||
! { "$@"; result=$?; }
|
||||
[ $result -eq 0 ] && break
|
||||
count=$(($count + 1))
|
||||
sleep 1
|
||||
done
|
||||
|
||||
[ $count -gt 3 ] && {
|
||||
echo -e "\nThe command \"$@\" failed 3 times.\n" >&2
|
||||
}
|
||||
|
||||
return $result
|
||||
}
|
||||
|
||||
# Let's begin!
|
||||
here="$(dirname "$(readlink -e "$0")")"
|
||||
here=$(dirname $(readlink -e $0))
|
||||
set -e
|
||||
|
||||
. $here/../build_tools_util.sh
|
||||
# Clean up Wine environment
|
||||
echo "Cleaning $WINEPREFIX"
|
||||
rm -rf $WINEPREFIX
|
||||
echo "done"
|
||||
|
||||
wine 'wineboot'
|
||||
|
||||
|
||||
cd /tmp/electrum-build
|
||||
|
||||
# Install Python
|
||||
@ -43,7 +96,8 @@ KEYRING_PYTHON_DEV="keyring-electrum-build-python-dev.gpg"
|
||||
for server in $(shuf -e ha.pool.sks-keyservers.net \
|
||||
hkp://p80.pool.sks-keyservers.net:80 \
|
||||
keyserver.ubuntu.com \
|
||||
hkp://keyserver.ubuntu.com:80) ; do
|
||||
hkp://keyserver.ubuntu.com:80 \
|
||||
pgp.mit.edu) ; do
|
||||
retry gpg --no-default-keyring --keyring $KEYRING_PYTHON_DEV --keyserver "$server" --recv-keys $KEYLIST_PYTHON_DEV \
|
||||
&& break || : ;
|
||||
done
|
||||
@ -52,21 +106,31 @@ for msifile in core dev exe lib pip tools; do
|
||||
wget -N -c "https://www.python.org/ftp/python/$PYTHON_VERSION/win32/${msifile}.msi"
|
||||
wget -N -c "https://www.python.org/ftp/python/$PYTHON_VERSION/win32/${msifile}.msi.asc"
|
||||
verify_signature "${msifile}.msi.asc" $KEYRING_PYTHON_DEV
|
||||
wine msiexec /i "${msifile}.msi" /qb TARGETDIR=$PYHOME
|
||||
wine msiexec /i "${msifile}.msi" /qb TARGETDIR=C:/python$PYTHON_VERSION
|
||||
done
|
||||
|
||||
# Install dependencies specific to binaries
|
||||
# note that this also installs pinned versions of both pip and setuptools
|
||||
$PYTHON -m pip install -r "$here"/../deterministic-build/requirements-binaries.txt
|
||||
# upgrade pip
|
||||
$PYTHON -m pip install pip --upgrade
|
||||
|
||||
# Install pywin32-ctypes (needed by pyinstaller)
|
||||
$PYTHON -m pip install pywin32-ctypes==0.1.2
|
||||
|
||||
# install PySocks
|
||||
$PYTHON -m pip install win_inet_pton==1.0.1
|
||||
|
||||
$PYTHON -m pip install -r $here/../deterministic-build/requirements-binaries.txt
|
||||
|
||||
# Install PyInstaller
|
||||
$PYTHON -m pip install pyinstaller==3.4 --no-use-pep517
|
||||
$PYTHON -m pip install https://github.com/ecdsa/pyinstaller/archive/fix_2952.zip
|
||||
|
||||
# Install ZBar
|
||||
download_if_not_exist $ZBAR_FILENAME "$ZBAR_URL"
|
||||
verify_hash $ZBAR_FILENAME "$ZBAR_SHA256"
|
||||
wine "$PWD/$ZBAR_FILENAME" /S
|
||||
|
||||
# Upgrade setuptools (so Electrum can be installed later)
|
||||
$PYTHON -m pip install setuptools --upgrade
|
||||
|
||||
# Install NSIS installer
|
||||
download_if_not_exist $NSIS_FILENAME "$NSIS_URL"
|
||||
verify_hash $NSIS_FILENAME "$NSIS_SHA256"
|
||||
@ -76,7 +140,10 @@ download_if_not_exist $LIBUSB_FILENAME "$LIBUSB_URL"
|
||||
verify_hash $LIBUSB_FILENAME "$LIBUSB_SHA256"
|
||||
7z x -olibusb $LIBUSB_FILENAME -aoa
|
||||
|
||||
cp libusb/MS32/dll/libusb-1.0.dll $WINEPREFIX/drive_c/$PYTHON_FOLDER/
|
||||
cp libusb/MS32/dll/libusb-1.0.dll $WINEPREFIX/drive_c/python$PYTHON_VERSION/
|
||||
|
||||
# add dlls needed for pyinstaller:
|
||||
cp $WINEPREFIX/drive_c/python$PYTHON_VERSION/Lib/site-packages/PyQt5/Qt/bin/* $WINEPREFIX/drive_c/python$PYTHON_VERSION/
|
||||
|
||||
mkdir -p $WINEPREFIX/drive_c/tmp
|
||||
cp secp256k1/libsecp256k1.dll $WINEPREFIX/drive_c/tmp/
|
||||
|
||||
@ -1,69 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
RED='\033[0;31m'
|
||||
BLUE='\033[0;34m'
|
||||
YELLOW='\033[0;33m'
|
||||
NC='\033[0m' # No Color
|
||||
function info {
|
||||
printf "\r💬 ${BLUE}INFO:${NC} ${1}\n"
|
||||
}
|
||||
function fail {
|
||||
printf "\r🗯 ${RED}ERROR:${NC} ${1}\n"
|
||||
exit 1
|
||||
}
|
||||
function warn {
|
||||
printf "\r⚠️ ${YELLOW}WARNING:${NC} ${1}\n"
|
||||
}
|
||||
|
||||
|
||||
# based on https://superuser.com/questions/497940/script-to-verify-a-signature-with-gpg
|
||||
function verify_signature() {
|
||||
local file=$1 keyring=$2 out=
|
||||
if out=$(gpg --no-default-keyring --keyring "$keyring" --status-fd 1 --verify "$file" 2>/dev/null) &&
|
||||
echo "$out" | grep -qs "^\[GNUPG:\] VALIDSIG "; then
|
||||
return 0
|
||||
else
|
||||
echo "$out" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function verify_hash() {
|
||||
local file=$1 expected_hash=$2
|
||||
actual_hash=$(sha256sum $file | awk '{print $1}')
|
||||
if [ "$actual_hash" == "$expected_hash" ]; then
|
||||
return 0
|
||||
else
|
||||
echo "$file $actual_hash (unexpected hash)" >&2
|
||||
rm "$file"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function download_if_not_exist() {
|
||||
local file_name=$1 url=$2
|
||||
if [ ! -e $file_name ] ; then
|
||||
wget -O $file_name "$url"
|
||||
fi
|
||||
}
|
||||
|
||||
# https://github.com/travis-ci/travis-build/blob/master/lib/travis/build/templates/header.sh
|
||||
function retry() {
|
||||
local result=0
|
||||
local count=1
|
||||
while [ $count -le 3 ]; do
|
||||
[ $result -ne 0 ] && {
|
||||
echo -e "\nThe command \"$@\" failed. Retrying, $count of 3.\n" >&2
|
||||
}
|
||||
! { "$@"; result=$?; }
|
||||
[ $result -eq 0 ] && break
|
||||
count=$(($count + 1))
|
||||
sleep 1
|
||||
done
|
||||
|
||||
[ $count -gt 3 ] && {
|
||||
echo -e "\nThe command \"$@\" failed 3 times.\n" >&2
|
||||
}
|
||||
|
||||
return $result
|
||||
}
|
||||
@ -18,6 +18,13 @@ function get_git_mtime {
|
||||
|
||||
fail=0
|
||||
|
||||
for f in icons/* "icons.qrc"; do
|
||||
if (( $(get_git_mtime "$f") > $(get_git_mtime "contrib/deterministic-build/electrum-icons/") )); then
|
||||
echo "Modification time of $f (" $(get_git_mtime --readable "$f") ") is newer than"\
|
||||
"last update of electrum-icons"
|
||||
fail=1
|
||||
fi
|
||||
done
|
||||
|
||||
if [ $(date +%s -d "2 weeks ago") -gt $(get_git_mtime "contrib/deterministic-build/electrum-locale/") ]; then
|
||||
echo "Last update from electrum-locale is older than 2 weeks."\
|
||||
|
||||
1
contrib/deterministic-build/electrum-icons
Submodule
@ -0,0 +1 @@
|
||||
Subproject commit 5af76dc1b04c782e622ac409cd802c483c529c1f
|
||||
@ -1 +1 @@
|
||||
Subproject commit ff5ad3a4436dddcc82799f8a91793013240c3b7b
|
||||
Subproject commit de999ceffd2a864df54451d23f290ef5f333e8ea
|
||||
@ -1,10 +1,7 @@
|
||||
#!/usr/bin/env python3
|
||||
import sys
|
||||
|
||||
try:
|
||||
import requests
|
||||
except ImportError as e:
|
||||
sys.exit(f"Error: {str(e)}. Try 'sudo python3 -m pip install <module-name>'")
|
||||
import requests
|
||||
|
||||
|
||||
def check_restriction(p, r):
|
||||
|
||||
@ -1,56 +1,55 @@
|
||||
pip==19.0.1 \
|
||||
--hash=sha256:aae79c7afe895fb986ec751564f24d97df1331bb99cdfec6f70dada2f40c0044 \
|
||||
--hash=sha256:e81ddd35e361b630e94abeda4a1eddd36d47a90e71eb00f38f46b57f787cd1a5
|
||||
pycryptodomex==3.7.3 \
|
||||
--hash=sha256:0bda549e20db1eb8e29fb365d10acf84b224d813b1131c828fc830b2ce313dcd \
|
||||
--hash=sha256:1210c0818e5334237b16d99b5785aa0cee815d9997ee258bd5e2936af8e8aa50 \
|
||||
--hash=sha256:2090dc8cd7843eae75bd504b9be86792baa171fc5a758ea3f60188ab67ca95cf \
|
||||
--hash=sha256:22e6784b65dfdd357bf9a8a842db445192b227103e2c3137a28c489c46742135 \
|
||||
--hash=sha256:2edb8c3965a77e3092b5c5c1233ffd32de083f335202013f52d662404191ac79 \
|
||||
--hash=sha256:310fe269ac870135ff610d272e88dcb594ee58f40ac237a688d7c972cbca43e8 \
|
||||
--hash=sha256:456136b7d459f000794a67b23558351c72e21f0c2d4fcaa09fc99dae7844b0ef \
|
||||
--hash=sha256:463e49a9c5f1fa7bd36aff8debae0b5c487868c1fb66704529f2ad7e92f0cc9f \
|
||||
--hash=sha256:4a33b2828799ef8be789a462e6645ea6fe2c42b0df03e6763ccbfd1789c453e6 \
|
||||
--hash=sha256:5ff02dff1b03929e6339226b318aa59bd0b5c362f96e3e0eb7f3401d30594ed3 \
|
||||
--hash=sha256:6b1db8234b8ee2b30435d9e991389c2eeae4d45e09e471ffe757ba1dfae682bb \
|
||||
--hash=sha256:6eb67ee02de143cd19e36a52bd3869a9dc53e9184cd6bed5c39ff71dee2f6a45 \
|
||||
--hash=sha256:6f42eea5afc7eee29494fdfddc6bb7173953d4197d9200e4f67096c2a24bc21b \
|
||||
--hash=sha256:87bc8082e2de2247df7d0b161234f8edb1384294362cc0c8db9324463097578b \
|
||||
--hash=sha256:8df93d34bc0e3a28a27652070164683a07d8a50c628119d6e0f7710f4d01b42f \
|
||||
--hash=sha256:989952c39e8fef1c959f0a0f85656e29c41c01162e33a3f5fd8ce71e47262ae9 \
|
||||
--hash=sha256:a4a203077e2f312ec8677dde80a5c4e6fe5a82a46173a8edc8da668602a3e073 \
|
||||
--hash=sha256:a793c1242dffd39f585ae356344e8935d30f01f6be7d4c62ffc87af376a2f5f9 \
|
||||
--hash=sha256:b70fe991564e178af02ccf89435a8f9e8d052707a7c4b95bf6027cb785da3175 \
|
||||
--hash=sha256:b83594196e3661cb78c97b80a62fbfbba2add459dfd532b58e7a7c62dd06aab4 \
|
||||
--hash=sha256:ba27725237d0a3ea66ec2b6b387259471840908836711a3b215160808dffed0f \
|
||||
--hash=sha256:d1ab8ad1113cdc553ca50c4d5f0142198c317497364c0c70443d69f7ad1c9288 \
|
||||
--hash=sha256:dce039a8a8a318d7af83cae3fd08d58cefd2120075dfac0ae14d706974040f63 \
|
||||
--hash=sha256:e3213037ea33c85ab705579268cbc8a4433357e9fb99ec7ce9fdcc4d4eec1d50 \
|
||||
--hash=sha256:ec8d8023d31ef72026d46e9fb301ff8759eff5336bcf3d1510836375f53f96a9 \
|
||||
--hash=sha256:ece65730d50aa57a1330d86d81582a2d1587b2ca51cb34f586da8551ddc68fee \
|
||||
--hash=sha256:ed21fc515e224727793e4cc3fb3d00f33f59e3a167d3ad6ac1475ab3b05c2f9e \
|
||||
--hash=sha256:eec1132d878153d61a05424f35f089f951bd6095a4f6c60bdd2ef8919d44425e
|
||||
PyQt5==5.11.3 \
|
||||
--hash=sha256:517e4339135c4874b799af0d484bc2e8c27b54850113a68eec40a0b56534f450 \
|
||||
--hash=sha256:ac1eb5a114b6e7788e8be378be41c5e54b17d5158994504e85e43b5fca006a39 \
|
||||
--hash=sha256:d2309296a5a79d0a1c0e6c387c30f0398b65523a6dcc8a19cc172e46b949e00d \
|
||||
--hash=sha256:e85936bae1581bcb908847d2038e5b34237a5e6acc03130099a78930770e7ead
|
||||
PyQt5-sip==4.19.13 \
|
||||
--hash=sha256:125f77c087572c9272219cda030a63c2f996b8507592b2a54d7ef9b75f9f054d \
|
||||
--hash=sha256:14c37b06e3fb7c2234cb208fa461ec4e62b4ba6d8b32ca3753c0b2cfd61b00e3 \
|
||||
--hash=sha256:1cb2cf52979f9085fc0eab7e0b2438eb4430d4aea8edec89762527e17317175b \
|
||||
--hash=sha256:4babef08bccbf223ec34464e1ed0a23caeaeea390ca9a3529227d9a57f0d6ee4 \
|
||||
--hash=sha256:53cb9c1208511cda0b9ed11cffee992a5a2f5d96eb88722569b2ce65ecf6b960 \
|
||||
--hash=sha256:549449d9461d6c665cbe8af4a3808805c5e6e037cd2ce4fd93308d44a049bfac \
|
||||
--hash=sha256:5f5b3089b200ff33de3f636b398e7199b57a6b5c1bb724bdb884580a072a14b5 \
|
||||
--hash=sha256:a4d9bf6e1fa2dd6e73f1873f1a47cee11a6ba0cf9ba8cf7002b28c76823600d0 \
|
||||
--hash=sha256:a4ee6026216f1fbe25c8847f9e0fbce907df5b908f84816e21af16ec7666e6fe \
|
||||
--hash=sha256:a91a308a5e0cc99de1e97afd8f09f46dd7ca20cfaa5890ef254113eebaa1adff \
|
||||
--hash=sha256:b0342540da479d2713edc68fb21f307473f68da896ad5c04215dae97630e0069 \
|
||||
--hash=sha256:f997e21b4e26a3397cb7b255b8d1db5b9772c8e0c94b6d870a5a0ab5c27eacaa
|
||||
setuptools==40.8.0 \
|
||||
--hash=sha256:6e4eec90337e849ade7103723b9a99631c1f0d19990d6e8412dc42f5ae8b304d \
|
||||
--hash=sha256:e8496c0079f3ac30052ffe69b679bd876c5265686127a3159cfa415669b7f9ab
|
||||
wheel==0.32.3 \
|
||||
--hash=sha256:029703bf514e16c8271c3821806a1c171220cc5bdd325cbf4e7da1e056a01db6 \
|
||||
--hash=sha256:1e53cdb3f808d5ccd0df57f964263752aa74ea7359526d3da6c02114ec1e1d44
|
||||
pip==10.0.1 \
|
||||
--hash=sha256:717cdffb2833be8409433a93746744b59505f42146e8d37de6c62b430e25d6d7 \
|
||||
--hash=sha256:f2bd08e0cd1b06e10218feaf6fef299f473ba706582eb3bd9d52203fdbd7ee68
|
||||
pycryptodomex==3.6.1 \
|
||||
--hash=sha256:1869d7735f445bbf1681afa2acce10ad829857cfb7a4a7b702e484f222021892 \
|
||||
--hash=sha256:24e054190d2b11ad3b8517d186c0b3df6f902a5f5a91be8e4bb6a3fcdc65b2cf \
|
||||
--hash=sha256:26967d31fabb0d80cb2b254a7c0f55f8dec9931e8676891edd24aa5aaeb0d021 \
|
||||
--hash=sha256:2a341b57bb5844d53b8f632f79277cd534762f502fb73bff5dc1a2f615ff91ed \
|
||||
--hash=sha256:43d6eb014aba7be354f3e8fe2693fe96446f6791da2b9570e8d54d481e3ab224 \
|
||||
--hash=sha256:4c271577f4f8c5cced55a60f4504b34545121c14facb8fc357f89c24089c81fc \
|
||||
--hash=sha256:59721f2853df9cf2265304d3b6d6d8cebe3a86b1fddc00f2bfbf18eb2a48fb78 \
|
||||
--hash=sha256:63a77a1b27d12ed1c42f4e539d9dbe588a88b70ec64b55271cdf1f56c1223bd6 \
|
||||
--hash=sha256:6d04640386c55b9f44015747496c3b6582360b5b3b4e42f9ce3fc7c6840f80d0 \
|
||||
--hash=sha256:730bd75d90e16975a112ea79863ce1faa7703d3b54f10d77656e7dadf6be0ef6 \
|
||||
--hash=sha256:75a300aa86c56e9c19a7b476c397cb22fda3be7af4cf2f105990fdd94c52f486 \
|
||||
--hash=sha256:7c6f67005c6e421f02fd7fe9d95876094307b31628d728adc6c2e038e2ed9c09 \
|
||||
--hash=sha256:82b758f870c8dd859f9b58bc9cff007403b68742f9e0376e2cbd8aa2ad3baa83 \
|
||||
--hash=sha256:8528a958b746c4da767bfba5ac370250dcb741f4c69e55873bd6efe89ac07291 \
|
||||
--hash=sha256:93582ea5bc3e8f95cb36d9dd752c01452085b54b396e3ed775ac1793b8dc486a \
|
||||
--hash=sha256:94e0105ad8d82d3bf5a032c92fc03b01e3bc9ea40b58308c2da42f8cf8c16c47 \
|
||||
--hash=sha256:a65889424bf10a884ff031e7f3fd12273dd5b420ee08ca8fcfd431a2f6cbabc1 \
|
||||
--hash=sha256:a8467982d26bfb90089f50c3c5d9ed541b7fe9f9df20803fede70d5046cd4ff1 \
|
||||
--hash=sha256:ab497d4e7361511ede562ed3cd4528f46c005781bc23b1b943612d27bfb078c3 \
|
||||
--hash=sha256:bb05caf3f6cf41d964c01e08dfaddfe48086c7b3e96708d50647f0a29ff33f56 \
|
||||
--hash=sha256:c4643647f5656855975b2aaf70fe3aa1e0c1558f8d1b5de0c9a8ccac65114c57 \
|
||||
--hash=sha256:c550e20834b679ed0b7608c345a816f97047d2297aab4f4599f95edee5d16e99 \
|
||||
--hash=sha256:cc797712add76cd658110585481c380833637b68df1404190777ba715a81c9b9 \
|
||||
--hash=sha256:dff0c883d495bf45d18acc74938d1de4d6a08b3345acb9177a46c6997a578c44 \
|
||||
--hash=sha256:e4f69af1f5b46255ec7b8116a853879a55e8e6b595a73c39f14ca430c410c469 \
|
||||
--hash=sha256:f61d0d83e9dd974849f9b0826ec20f49dbd9ed233fd90bf2592be1337231418e \
|
||||
--hash=sha256:f65f21d2b616c30ad4ba801504343eb768fd0a2894c5f587e784201320556543
|
||||
PyQt5==5.10.1 \
|
||||
--hash=sha256:1e652910bd1ffd23a3a48c510ecad23a57a853ed26b782cd54b16658e6f271ac \
|
||||
--hash=sha256:4db7113f464c733a99fcb66c4c093a47cf7204ad3f8b3bda502efcc0839ac14b \
|
||||
--hash=sha256:9c17ab3974c1fc7bbb04cc1c9dae780522c0ebc158613f3025fccae82227b5f7 \
|
||||
--hash=sha256:f6035baa009acf45e5f460cf88f73580ad5dc0e72330029acd99e477f20a5d61
|
||||
setuptools==39.2.0 \
|
||||
--hash=sha256:8fca9275c89964f13da985c3656cb00ba029d7f3916b37990927ffdf264e7926 \
|
||||
--hash=sha256:f7cddbb5f5c640311eb00eab6e849f7701fa70bf6a183fc8a2c33dd1d1672fb2
|
||||
SIP==4.19.8 \
|
||||
--hash=sha256:09f9a4e6c28afd0bafedb26ffba43375b97fe7207bd1a0d3513f79b7d168b331 \
|
||||
--hash=sha256:105edaaa1c8aa486662226360bd3999b4b89dd56de3e314d82b83ed0587d8783 \
|
||||
--hash=sha256:1bb10aac55bd5ab0e2ee74b3047aa2016cfa7932077c73f602a6f6541af8cd51 \
|
||||
--hash=sha256:265ddf69235dd70571b7d4da20849303b436192e875ce7226be7144ca702a45c \
|
||||
--hash=sha256:52074f7cb5488e8b75b52f34ec2230bc75d22986c7fe5cd3f2d266c23f3349a7 \
|
||||
--hash=sha256:5ff887a33839de8fc77d7f69aed0259b67a384dc91a1dc7588e328b0b980bde2 \
|
||||
--hash=sha256:74da4ddd20c5b35c19cda753ce1e8e1f71616931391caeac2de7a1715945c679 \
|
||||
--hash=sha256:7d69e9cf4f8253a3c0dfc5ba6bb9ac8087b8239851f22998e98cb35cfe497b68 \
|
||||
--hash=sha256:97bb93ee0ef01ba90f57be2b606e08002660affd5bc380776dd8b0fcaa9e093a \
|
||||
--hash=sha256:cf98150a99e43fda7ae22abe655b6f202e491d6291486548daa56cb15a2fcf85 \
|
||||
--hash=sha256:d9023422127b94d11c1a84bfa94933e959c484f2c79553c1ef23c69fe00d25f8 \
|
||||
--hash=sha256:e72955e12f4fccf27aa421be383453d697b8a44bde2cc26b08d876fd492d0174
|
||||
wheel==0.31.1 \
|
||||
--hash=sha256:0a2e54558a0628f2145d2fc822137e322412115173e8a2ddbe1c9024338ae83c \
|
||||
--hash=sha256:80044e51ec5bbf6c894ba0bc48d26a8c20a9ba629f4ca19ea26ecfcf87685f5f
|
||||
|
||||
@ -1,56 +1,50 @@
|
||||
btchip-python==0.1.28 \
|
||||
--hash=sha256:da09d0d7a6180d428833795ea9a233c3b317ddfcccea8cc6f0eba59435e5dd83
|
||||
certifi==2018.11.29 \
|
||||
--hash=sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7 \
|
||||
--hash=sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033
|
||||
btchip-python==0.1.27 \
|
||||
--hash=sha256:e58a941abbb2d8901bf4858baa18012537c60812c7f895f9a039113ecce3032b
|
||||
certifi==2018.4.16 \
|
||||
--hash=sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7 \
|
||||
--hash=sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0
|
||||
chardet==3.0.4 \
|
||||
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
|
||||
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
|
||||
ckcc-protocol==0.7.2 \
|
||||
--hash=sha256:31ee5178cfba8895eb2a6b8d06dc7830b51461a0ff767a670a64707c63e6b264 \
|
||||
--hash=sha256:498db4ccdda018cd9f40210f5bd02ddcc98e7df583170b2eab4035c86c3cc03b
|
||||
click==7.0 \
|
||||
--hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \
|
||||
--hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7
|
||||
construct==2.9.45 \
|
||||
--hash=sha256:2271a0efd0798679dea825ff47e22a4c550456a5db0ba8baa82f7eae0af0118c
|
||||
Cython==0.29.4 \
|
||||
--hash=sha256:004eeb2fc64e9db4a3bc0d65583d69769c7242d29d9335121cbab776688dc122 \
|
||||
--hash=sha256:028ee8571884a129e0d5c4d48296f6b3ea679668c096bb65fe8b2ff7ac29d707 \
|
||||
--hash=sha256:162b8b794ca9210c7039d54b6d96cd342e0404e41e7e467baae69f0252d7e52a \
|
||||
--hash=sha256:1aba4cf581d203e8fa3b6a7b432b09416e4f93c0d1f7744834acacfe3e9db424 \
|
||||
--hash=sha256:1be8f08c87b92a880f2fd19f93293e738ca8647834ad05625635320cec9ecad4 \
|
||||
--hash=sha256:21c707a811912aeb65abe8a66e5adebc759889661c8f4cf677523cd33c609084 \
|
||||
--hash=sha256:234de250ef09ba667fc6a8f6ba07712d3fe5bb8d92d70d2b958d4c56e3172c4a \
|
||||
--hash=sha256:33dad82003df518e1242ac3b0592fc63c49d65d0d37b696cb43b7d35085e6bd5 \
|
||||
--hash=sha256:54ee6cbc1397b27670e598ae15cab36e826a01605f63bf267a5fd2642bd8a147 \
|
||||
--hash=sha256:6058c57657d2704c9fad8a56458173d2f525dce4083ca46e9b99b1b35da2b27f \
|
||||
--hash=sha256:6d3065f39ea1354eba4807e2752e97d57f26d6f68bc4a4c561264ca4300c46cb \
|
||||
--hash=sha256:7059e5acac1d7a82e75e553924d9ea59b0e79203adf903cb999287fbcc8f50f1 \
|
||||
--hash=sha256:71c31e01f20a3a7273f6f38760d29170ee89e895be540481130cb173ef6b7246 \
|
||||
--hash=sha256:89225447801e8bd0f6d8e2c0807ded83af8ad7bf4086b5ecf1f22c5a68d1b3e3 \
|
||||
--hash=sha256:9783f11fe4a4af66b0aa0da68fda833c10b95edd9099a6dbe710d03bcb96adf2 \
|
||||
--hash=sha256:9a0be0aac30d71fe490a2b0377fca6e13a5242ecc01d09c7a358f1f2fcb07a80 \
|
||||
--hash=sha256:9a2cccc26dcf2df1e0048cdf63bd714f1d5dfad457f03b9938c5cc3eef74c9ab \
|
||||
--hash=sha256:b0889310f8558eb406a4a853d63553b90c621476f1b5b80b46b1ff57eef198cf \
|
||||
--hash=sha256:c46ef7b771c88512435399e5ffbc3a70079d4945123d6fbfc6211b4cfdc4e546 \
|
||||
--hash=sha256:c71a77c1047d65e5b4e614053cbb7b567c36359b2bc1d27fba23b984ab6dddd0 \
|
||||
--hash=sha256:c9361811a1a49db11efce54fedd01a5544af8db074fce471c720bdb85ec9c7a8 \
|
||||
--hash=sha256:d021a8326a1d2cdb182b0dd7f49bb42d8a4e6ddfb3c8d388ee5be26d57d49f3b \
|
||||
--hash=sha256:d1ee3d39c73a094ae5b6e2f9263ae0dc61af1b549a0869ade8c3c30325ed9f26 \
|
||||
--hash=sha256:d49d7cf82192edc6e386262a07ceb3515028afbd9009dd8ec669d2c0a9f20128 \
|
||||
--hash=sha256:dc5fc1fa072a98f152e46465aaf3e02b3ea36a9d3b8c79bfabd47b0e3ad9226c \
|
||||
--hash=sha256:e290fed7fe73860657af564e596fff87e75cfda861c067e89212970a47826cc6 \
|
||||
--hash=sha256:fcf9a9a566ab98495db641eefee471eb03df71e394ee51fdfa9b4c0b9f6928eb \
|
||||
--hash=sha256:fe8c1d2538867bf2753988a4a2d548bcb211fcbba125aa3e9092391b16f47b56
|
||||
click==6.7 \
|
||||
--hash=sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d \
|
||||
--hash=sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b
|
||||
Cython==0.28.3 \
|
||||
--hash=sha256:0344e9352b0915910e212c38403b63f902ce1cba75dde7a43a9112ff960eb2a5 \
|
||||
--hash=sha256:0a390c39e912fc5f82d5feae2d16ea061971407099e1efb0fecb255cb96fbeff \
|
||||
--hash=sha256:0f2b2e09f94c498f555935e732b7321b5f62f00e7a789238f6c5ddd66987a54d \
|
||||
--hash=sha256:15614592616b6dd5e919e158796350ebeba6cb6b5d2998cfff41b53f568c8355 \
|
||||
--hash=sha256:1aae6d6e9858888144cea147eb5e677830f45faaff3d305d77378c3cba55f526 \
|
||||
--hash=sha256:200583297f23e558744bc4688d8a2b2605ab6ad7d1494a9fd8c8094ad65ebf3c \
|
||||
--hash=sha256:295facc211a6b55db9979455b856180f2839be22ab767ffdea55986bee83ca9f \
|
||||
--hash=sha256:36c16bf39280fe857213d8da31c07a6179d3878c3dc2e435dce0974b9f8f0729 \
|
||||
--hash=sha256:3fef8dfa9cf86ab7814ca31369374ddd5b9524f54406aa83b53b5937965b8e88 \
|
||||
--hash=sha256:439d233d3214e3d69c033a9a93516758f2c8a03e83ea51ae14b6eed13687d224 \
|
||||
--hash=sha256:455ab39c6c0849a6c008fcdf2fae42475f18d0801a3be229e8f75367bbe3b325 \
|
||||
--hash=sha256:56821e3791209e6a11992e294afbf7e3dcda7d4fd54d06396dd521928d3d14fe \
|
||||
--hash=sha256:62b594584889b33bbea7e71f9d7c5c6539091b341334ef7ca1ae7e30a9dd3e15 \
|
||||
--hash=sha256:70f81a75fb25c1c3c61843e3a6fe771a76c4ebf4d154455a7eff0740ad47dff4 \
|
||||
--hash=sha256:8011090beb09251cb4ece1e14263e574b38eda696b788552b369ad343373d0e9 \
|
||||
--hash=sha256:80d6a0369333a162fc32a22637f5870f3e87fb038c7b58860bbe00b05b58aa62 \
|
||||
--hash=sha256:85b04e32af58a3c008c0ba8169017770aaa342a5972b748f81d043d66363e437 \
|
||||
--hash=sha256:9ed273d82116fa148c92901b9639030e087979d455982bd7bf727fb486c0bd17 \
|
||||
--hash=sha256:a1af59e6c9b4acc07c429d8495fc016a35e0a1270f28c57317352f512df7e214 \
|
||||
--hash=sha256:b894ff4daf8dfaf657bf2d5e7190a4de11b2400b1e0fb0902974d35c23a26dea \
|
||||
--hash=sha256:c2659981150b4de04397dcfd4bff64e384d3ba25af60d1b22820fdf108298cb2 \
|
||||
--hash=sha256:c981a750858f1727995acf861ab030b267d264ca6efda2f01104941187a3675f \
|
||||
--hash=sha256:cc4152b19ec168391f7815d24b70c8911829ba281bd5fcd98cab9dc21abe62ff \
|
||||
--hash=sha256:d0f5b1668e7f7f6fc9849f49a20c5db10562a0ab29cd66818894dfebbca7b304 \
|
||||
--hash=sha256:d7152006ed1a3adb8f978077b57d237ddafa188240af53cd72b5c79e4ed000e3 \
|
||||
--hash=sha256:e5f877472993474296125c22b84c334b550010815e513cccce73da854a132d64 \
|
||||
--hash=sha256:e7c2c87ff2f99ed4be1bb046d6eddfb388af627928037f9e0a420c05daaf14ed \
|
||||
--hash=sha256:edd7d499685655031be5b4d33005096b6345f81eeb7ab9d2dd415db0c7bcf64e \
|
||||
--hash=sha256:f99a777fda569a88deea863eac2722b5e88957c4d5f4413949740da791857ac9
|
||||
ecdsa==0.13 \
|
||||
--hash=sha256:40d002cf360d0e035cf2cb985e1308d41aaa087cbfc135b2dc2d844296ea546c \
|
||||
--hash=sha256:64cf1ee26d1cde3c73c6d7d107f835fed7c6a2904aef9eac223d57ad800c43fa
|
||||
hidapi==0.7.99.post21 \
|
||||
--hash=sha256:1ac170f4d601c340f2cd52fd06e85c5e77bad7ceac811a7bb54b529f7dc28c24 \
|
||||
--hash=sha256:6424ad75da0021ce8c1bcd78056a04adada303eff3c561f8d132b85d0a914cb3 \
|
||||
--hash=sha256:8d3be666f464347022e2b47caf9132287885d9eacc7895314fc8fefcb4e42946 \
|
||||
--hash=sha256:92878bad7324dee619b7832fbfc60b5360d378aa7c5addbfef0a410d8fd342c7 \
|
||||
--hash=sha256:b4b1f6aff0192e9be153fe07c1b7576cb7a1ff52e78e3f76d867be95301a8e87 \
|
||||
--hash=sha256:bf03f06f586ce7d8aeb697a94b7dba12dc9271aae92d7a8d4486360ff711a660 \
|
||||
--hash=sha256:c76de162937326fcd57aa399f94939ce726242323e65c15c67e183da1f6c26f7 \
|
||||
@ -58,42 +52,36 @@ hidapi==0.7.99.post21 \
|
||||
--hash=sha256:d4b5787a04613503357606bb10e59c3e2c1114fa00ee328b838dd257f41cbd7b \
|
||||
--hash=sha256:e0be1aa6566979266a8fc845ab0e18613f4918cf2c977fe67050f5dc7e2a9a97 \
|
||||
--hash=sha256:edfb16b16a298717cf05b8c8a9ad1828b6ff3de5e93048ceccd74e6ae4ff0922
|
||||
idna==2.8 \
|
||||
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
|
||||
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
|
||||
keepkey==6.0.2 \
|
||||
--hash=sha256:3236dd701bde74768c41a92e724e322ea5e01b90985e2e6215eb85b77f9a0ae1 \
|
||||
--hash=sha256:677e07deacc2ff97bee313b8dd7ae55faebab02e7d17b9a8e49b889996a36010 \
|
||||
--hash=sha256:af107f610fb0e2417fc7a9d87a2fa22aac9b80b79559370d178be424bb85489a
|
||||
libusb1==1.7 \
|
||||
--hash=sha256:9d4f66d2ed699986b06bc3082cd262101cb26af7a76a34bd15b7eb56cba37e0f
|
||||
idna==2.7 \
|
||||
--hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \
|
||||
--hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16
|
||||
keepkey==4.0.2 \
|
||||
--hash=sha256:cddee60ae405841cdff789cbc54168ceaeb2282633420f2be155554c25c69138
|
||||
libusb1==1.6.4 \
|
||||
--hash=sha256:8c930d9c1d037d9c83924c82608aa6a1adcaa01ca0e4a23ee0e8e18d7eee670d
|
||||
mnemonic==0.18 \
|
||||
--hash=sha256:02a7306a792370f4a0c106c2cf1ce5a0c84b9dbd7e71c6792fdb9ad88a727f1d
|
||||
pbkdf2==1.3 \
|
||||
--hash=sha256:ac6397369f128212c43064a2b4878038dab78dab41875364554aaf2a684e6979
|
||||
pip==19.0.1 \
|
||||
--hash=sha256:aae79c7afe895fb986ec751564f24d97df1331bb99cdfec6f70dada2f40c0044 \
|
||||
--hash=sha256:e81ddd35e361b630e94abeda4a1eddd36d47a90e71eb00f38f46b57f787cd1a5
|
||||
protobuf==3.6.1 \
|
||||
--hash=sha256:10394a4d03af7060fa8a6e1cbf38cea44be1467053b0aea5bbfcb4b13c4b88c4 \
|
||||
--hash=sha256:1489b376b0f364bcc6f89519718c057eb191d7ad6f1b395ffd93d1aa45587811 \
|
||||
--hash=sha256:1931d8efce896981fe410c802fd66df14f9f429c32a72dd9cfeeac9815ec6444 \
|
||||
--hash=sha256:196d3a80f93c537f27d2a19a4fafb826fb4c331b0b99110f985119391d170f96 \
|
||||
--hash=sha256:46e34fdcc2b1f2620172d3a4885128705a4e658b9b62355ae5e98f9ea19f42c2 \
|
||||
--hash=sha256:4b92e235a3afd42e7493b281c8b80c0c65cbef45de30f43d571d1ee40a1f77ef \
|
||||
--hash=sha256:574085a33ca0d2c67433e5f3e9a0965c487410d6cb3406c83bdaf549bfc2992e \
|
||||
--hash=sha256:59cd75ded98094d3cf2d79e84cdb38a46e33e7441b2826f3838dcc7c07f82995 \
|
||||
--hash=sha256:5ee0522eed6680bb5bac5b6d738f7b0923b3cafce8c4b1a039a6107f0841d7ed \
|
||||
--hash=sha256:65917cfd5da9dfc993d5684643063318a2e875f798047911a9dd71ca066641c9 \
|
||||
--hash=sha256:685bc4ec61a50f7360c9fd18e277b65db90105adbf9c79938bd315435e526b90 \
|
||||
--hash=sha256:92e8418976e52201364a3174e40dc31f5fd8c147186d72380cbda54e0464ee19 \
|
||||
--hash=sha256:9335f79d1940dfb9bcaf8ec881fb8ab47d7a2c721fb8b02949aab8bbf8b68625 \
|
||||
--hash=sha256:a7ee3bb6de78185e5411487bef8bc1c59ebd97e47713cba3c460ef44e99b3db9 \
|
||||
--hash=sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e \
|
||||
--hash=sha256:e7a5ccf56444211d79e3204b05087c1460c212a2c7d62f948b996660d0165d68 \
|
||||
--hash=sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10
|
||||
pyaes==1.6.1 \
|
||||
--hash=sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f
|
||||
pip==10.0.1 \
|
||||
--hash=sha256:717cdffb2833be8409433a93746744b59505f42146e8d37de6c62b430e25d6d7 \
|
||||
--hash=sha256:f2bd08e0cd1b06e10218feaf6fef299f473ba706582eb3bd9d52203fdbd7ee68
|
||||
protobuf==3.6.0 \
|
||||
--hash=sha256:12985d9f40c104da2f44ec089449214876809b40fdc5d9e43b93b512b9e74056 \
|
||||
--hash=sha256:12c97fe27af12fc5d66b23f905ab09dd4fb0c68d5a74a419d914580e6d2e71e3 \
|
||||
--hash=sha256:327fb9d8a8247bc780b9ea7ed03c0643bc0d22c139b761c9ec1efc7cc3f0923e \
|
||||
--hash=sha256:3895319db04c0b3baed74fb66be7ba9f4cd8e88a432b8e71032cdf08b2dfee23 \
|
||||
--hash=sha256:695072063e256d32335d48b9484451f7c7948edc3dbd419469d6a778602682fc \
|
||||
--hash=sha256:7d786f3ef5b33a04e6538089674f244a3b0f588155016559d950989010af97d0 \
|
||||
--hash=sha256:8bf82bb7a466a54be7272dcb492f71d55a2453a58d862fb74c3f2083f2768543 \
|
||||
--hash=sha256:9bbc1ae1c33c1bd3a2fc05a3aec328544d2b039ff0ce6f000063628a32fad777 \
|
||||
--hash=sha256:9e992c68103ab5635728d29fcf132c669cb4e2db24d012685210276185009d17 \
|
||||
--hash=sha256:9f1087abb67b34e55108bc610936b34363a7aac692023bcbb17e065c253a1f80 \
|
||||
--hash=sha256:9fefcb92a3784b446abf3641d9a14dad815bee88e0edd10b9a9e0e144d01a991 \
|
||||
--hash=sha256:a37836aa47d1b81c2db1a6b7a5e79926062b5d76bd962115a0e615551be2b48d \
|
||||
--hash=sha256:cca22955443c55cf86f963a4ad7057bca95e4dcde84d6a493066d380cfab3bb0 \
|
||||
--hash=sha256:d7ac50bc06d31deb07ace6de85556c1d7330e5c0958f3b2af85037d6d1182abf \
|
||||
--hash=sha256:dfe6899304b898538f4dc94fa0b281b56b70e40f58afa4c6f807805261cbe2e8
|
||||
pyblake2==1.1.2 \
|
||||
--hash=sha256:3757f7ad709b0e1b2a6b3919fa79fe3261f166fc375cd521f2be480f8319dde9 \
|
||||
--hash=sha256:407e02c7f8f36fcec1b7aa114ddca0c1060c598142ea6f6759d03710b946a7e3 \
|
||||
@ -104,31 +92,24 @@ pyblake2==1.1.2 \
|
||||
--hash=sha256:baa2190bfe549e36163aa44664d4ee3a9080b236fc5d42f50dc6fd36bbdc749e \
|
||||
--hash=sha256:c53417ee0bbe77db852d5fd1036749f03696ebc2265de359fe17418d800196c4 \
|
||||
--hash=sha256:fbc9fcde75713930bc2a91b149e97be2401f7c9c56d735b46a109210f58d7358
|
||||
requests==2.21.0 \
|
||||
--hash=sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e \
|
||||
--hash=sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b
|
||||
safet==0.1.4 \
|
||||
--hash=sha256:522c257910f9472e9c77c487425ed286f6721c314653e232bc41c6cedece1bb1 \
|
||||
--hash=sha256:b152874acdc89ff0c8b2d680bfbf020b3e53527c2ad3404489dd61a548aa56a1
|
||||
setuptools==40.8.0 \
|
||||
--hash=sha256:6e4eec90337e849ade7103723b9a99631c1f0d19990d6e8412dc42f5ae8b304d \
|
||||
--hash=sha256:e8496c0079f3ac30052ffe69b679bd876c5265686127a3159cfa415669b7f9ab
|
||||
six==1.12.0 \
|
||||
--hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
|
||||
--hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73
|
||||
trezor==0.11.1 \
|
||||
--hash=sha256:6043f321d856e1b45b9df0c37810264f08d065bb56cd999f61a05fe2906e9e18 \
|
||||
--hash=sha256:6119b30cf9a136667753935bd06c5f341e78950b35e8ccbadaecc65c12f1946d
|
||||
typing-extensions==3.7.2 \
|
||||
--hash=sha256:07b2c978670896022a43c4b915df8958bec4a6b84add7f2c87b2b728bda3ba64 \
|
||||
--hash=sha256:f3f0e67e1d42de47b5c67c32c9b26641642e9170fe7e292991793705cd5fef7c \
|
||||
--hash=sha256:fb2cd053238d33a8ec939190f30cfd736c00653a85a2919415cecf7dc3d9da71
|
||||
urllib3==1.24.1 \
|
||||
--hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \
|
||||
--hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22
|
||||
websocket-client==0.54.0 \
|
||||
--hash=sha256:8c8bf2d4f800c3ed952df206b18c28f7070d9e3dcbd6ca6291127574f57ee786 \
|
||||
--hash=sha256:e51562c91ddb8148e791f0155fdb01325d99bb52c4cdbb291aee7a3563fd0849
|
||||
wheel==0.32.3 \
|
||||
--hash=sha256:029703bf514e16c8271c3821806a1c171220cc5bdd325cbf4e7da1e056a01db6 \
|
||||
--hash=sha256:1e53cdb3f808d5ccd0df57f964263752aa74ea7359526d3da6c02114ec1e1d44
|
||||
requests==2.19.1 \
|
||||
--hash=sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1 \
|
||||
--hash=sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a
|
||||
setuptools==39.2.0 \
|
||||
--hash=sha256:8fca9275c89964f13da985c3656cb00ba029d7f3916b37990927ffdf264e7926 \
|
||||
--hash=sha256:f7cddbb5f5c640311eb00eab6e849f7701fa70bf6a183fc8a2c33dd1d1672fb2
|
||||
six==1.11.0 \
|
||||
--hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \
|
||||
--hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb
|
||||
trezor==0.10.1 \
|
||||
--hash=sha256:09b4edfa83b787975c6f30728c13bb413621d5bdf722231748758ba0181b8a60 \
|
||||
--hash=sha256:5bcad3e97129fccd6f8b4cf08f81862e423373617c857feb492cfa1b1807844e
|
||||
urllib3==1.23 \
|
||||
--hash=sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf \
|
||||
--hash=sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5
|
||||
websocket-client==0.48.0 \
|
||||
--hash=sha256:18f1170e6a1b5463986739d9fd45c4308b0d025c1b2f9b88788d8f69e8a5eb4a \
|
||||
--hash=sha256:db70953ae4a064698b27ae56dcad84d0ee68b7b43cb40940f537738f38f510c1
|
||||
wheel==0.31.1 \
|
||||
--hash=sha256:0a2e54558a0628f2145d2fc822137e322412115173e8a2ddbe1c9024338ae83c \
|
||||
--hash=sha256:80044e51ec5bbf6c894ba0bc48d26a8c20a9ba629f4ca19ea26ecfcf87685f5f
|
||||
|
||||
@ -1,155 +1,64 @@
|
||||
aiohttp==3.5.4 \
|
||||
--hash=sha256:00d198585474299c9c3b4f1d5de1a576cc230d562abc5e4a0e81d71a20a6ca55 \
|
||||
--hash=sha256:0155af66de8c21b8dba4992aaeeabf55503caefae00067a3b1139f86d0ec50ed \
|
||||
--hash=sha256:09654a9eca62d1bd6d64aa44db2498f60a5c1e0ac4750953fdd79d5c88955e10 \
|
||||
--hash=sha256:199f1d106e2b44b6dacdf6f9245493c7d716b01d0b7fbe1959318ba4dc64d1f5 \
|
||||
--hash=sha256:296f30dedc9f4b9e7a301e5cc963012264112d78a1d3094cd83ef148fdf33ca1 \
|
||||
--hash=sha256:368ed312550bd663ce84dc4b032a962fcb3c7cae099dbbd48663afc305e3b939 \
|
||||
--hash=sha256:40d7ea570b88db017c51392349cf99b7aefaaddd19d2c78368aeb0bddde9d390 \
|
||||
--hash=sha256:629102a193162e37102c50713e2e31dc9a2fe7ac5e481da83e5bb3c0cee700aa \
|
||||
--hash=sha256:6d5ec9b8948c3d957e75ea14d41e9330e1ac3fed24ec53766c780f82805140dc \
|
||||
--hash=sha256:87331d1d6810214085a50749160196391a712a13336cd02ce1c3ea3d05bcf8d5 \
|
||||
--hash=sha256:9a02a04bbe581c8605ac423ba3a74999ec9d8bce7ae37977a3d38680f5780b6d \
|
||||
--hash=sha256:9c4c83f4fa1938377da32bc2d59379025ceeee8e24b89f72fcbccd8ca22dc9bf \
|
||||
--hash=sha256:9cddaff94c0135ee627213ac6ca6d05724bfe6e7a356e5e09ec57bd3249510f6 \
|
||||
--hash=sha256:a25237abf327530d9561ef751eef9511ab56fd9431023ca6f4803f1994104d72 \
|
||||
--hash=sha256:a5cbd7157b0e383738b8e29d6e556fde8726823dae0e348952a61742b21aeb12 \
|
||||
--hash=sha256:a97a516e02b726e089cffcde2eea0d3258450389bbac48cbe89e0f0b6e7b0366 \
|
||||
--hash=sha256:acc89b29b5f4e2332d65cd1b7d10c609a75b88ef8925d487a611ca788432dfa4 \
|
||||
--hash=sha256:b05bd85cc99b06740aad3629c2585bda7b83bd86e080b44ba47faf905fdf1300 \
|
||||
--hash=sha256:c2bec436a2b5dafe5eaeb297c03711074d46b6eb236d002c13c42f25c4a8ce9d \
|
||||
--hash=sha256:cc619d974c8c11fe84527e4b5e1c07238799a8c29ea1c1285149170524ba9303 \
|
||||
--hash=sha256:d4392defd4648badaa42b3e101080ae3313e8f4787cb517efd3f5b8157eaefd6 \
|
||||
--hash=sha256:e1c3c582ee11af7f63a34a46f0448fca58e59889396ffdae1f482085061a2889
|
||||
aiohttp-socks==0.2.2 \
|
||||
--hash=sha256:e473ee222b001fe33798957b9ce3352b32c187cf41684f8e2259427925914993 \
|
||||
--hash=sha256:eebd8939a7c3c1e3e7e1b2552c60039b4c65ef6b8b2351efcbdd98290538e310
|
||||
aiorpcX==0.10.4 \
|
||||
--hash=sha256:7130105d31230f069b0eea4e1893c7199cfe2d89a52a31aec718d37f4449935d \
|
||||
--hash=sha256:e6dfd584f597ee3aa6a8d4cb5755c8ffbbe42754f32728561d9e5940379d5096
|
||||
async_timeout==3.0.1 \
|
||||
--hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \
|
||||
--hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3
|
||||
attrs==18.2.0 \
|
||||
--hash=sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69 \
|
||||
--hash=sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb
|
||||
certifi==2018.11.29 \
|
||||
--hash=sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7 \
|
||||
--hash=sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033
|
||||
certifi==2018.4.16 \
|
||||
--hash=sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7 \
|
||||
--hash=sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0
|
||||
chardet==3.0.4 \
|
||||
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
|
||||
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
|
||||
dnspython==1.16.0 \
|
||||
--hash=sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01 \
|
||||
--hash=sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d
|
||||
dnspython==1.15.0 \
|
||||
--hash=sha256:40f563e1f7a7b80dc5a4e76ad75c23da53d62f1e15e6e517293b04e1f84ead7c \
|
||||
--hash=sha256:861e6e58faa730f9845aaaa9c6c832851fbf89382ac52915a51f89c71accdd31
|
||||
ecdsa==0.13 \
|
||||
--hash=sha256:40d002cf360d0e035cf2cb985e1308d41aaa087cbfc135b2dc2d844296ea546c \
|
||||
--hash=sha256:64cf1ee26d1cde3c73c6d7d107f835fed7c6a2904aef9eac223d57ad800c43fa
|
||||
idna==2.8 \
|
||||
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
|
||||
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
|
||||
idna_ssl==1.1.0 \
|
||||
--hash=sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c
|
||||
jsonrpclib-pelix==0.4.0 \
|
||||
--hash=sha256:19c558e169a51480b39548783067ca55046b62b2409ab4559931255e12f635de \
|
||||
--hash=sha256:a966d17f2f739ee89031cf5c807d85d92db6b2715fb2b2f8a88bbfc87f468b12
|
||||
multidict==4.5.2 \
|
||||
--hash=sha256:024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f \
|
||||
--hash=sha256:041e9442b11409be5e4fc8b6a97e4bcead758ab1e11768d1e69160bdde18acc3 \
|
||||
--hash=sha256:045b4dd0e5f6121e6f314d81759abd2c257db4634260abcfe0d3f7083c4908ef \
|
||||
--hash=sha256:047c0a04e382ef8bd74b0de01407e8d8632d7d1b4db6f2561106af812a68741b \
|
||||
--hash=sha256:068167c2d7bbeebd359665ac4fff756be5ffac9cda02375b5c5a7c4777038e73 \
|
||||
--hash=sha256:148ff60e0fffa2f5fad2eb25aae7bef23d8f3b8bdaf947a65cdbe84a978092bc \
|
||||
--hash=sha256:1d1c77013a259971a72ddaa83b9f42c80a93ff12df6a4723be99d858fa30bee3 \
|
||||
--hash=sha256:1d48bc124a6b7a55006d97917f695effa9725d05abe8ee78fd60d6588b8344cd \
|
||||
--hash=sha256:31dfa2fc323097f8ad7acd41aa38d7c614dd1960ac6681745b6da124093dc351 \
|
||||
--hash=sha256:34f82db7f80c49f38b032c5abb605c458bac997a6c3142e0d6c130be6fb2b941 \
|
||||
--hash=sha256:3d5dd8e5998fb4ace04789d1d008e2bb532de501218519d70bb672c4c5a2fc5d \
|
||||
--hash=sha256:4a6ae52bd3ee41ee0f3acf4c60ceb3f44e0e3bc52ab7da1c2b2aa6703363a3d1 \
|
||||
--hash=sha256:4b02a3b2a2f01d0490dd39321c74273fed0568568ea0e7ea23e02bd1fb10a10b \
|
||||
--hash=sha256:4b843f8e1dd6a3195679d9838eb4670222e8b8d01bc36c9894d6c3538316fa0a \
|
||||
--hash=sha256:5de53a28f40ef3c4fd57aeab6b590c2c663de87a5af76136ced519923d3efbb3 \
|
||||
--hash=sha256:61b2b33ede821b94fa99ce0b09c9ece049c7067a33b279f343adfe35108a4ea7 \
|
||||
--hash=sha256:6a3a9b0f45fd75dc05d8e93dc21b18fc1670135ec9544d1ad4acbcf6b86781d0 \
|
||||
--hash=sha256:76ad8e4c69dadbb31bad17c16baee61c0d1a4a73bed2590b741b2e1a46d3edd0 \
|
||||
--hash=sha256:7ba19b777dc00194d1b473180d4ca89a054dd18de27d0ee2e42a103ec9b7d014 \
|
||||
--hash=sha256:7c1b7eab7a49aa96f3db1f716f0113a8a2e93c7375dd3d5d21c4941f1405c9c5 \
|
||||
--hash=sha256:7fc0eee3046041387cbace9314926aa48b681202f8897f8bff3809967a049036 \
|
||||
--hash=sha256:8ccd1c5fff1aa1427100ce188557fc31f1e0a383ad8ec42c559aabd4ff08802d \
|
||||
--hash=sha256:8e08dd76de80539d613654915a2f5196dbccc67448df291e69a88712ea21e24a \
|
||||
--hash=sha256:c18498c50c59263841862ea0501da9f2b3659c00db54abfbf823a80787fde8ce \
|
||||
--hash=sha256:c49db89d602c24928e68c0d510f4fcf8989d77defd01c973d6cbe27e684833b1 \
|
||||
--hash=sha256:ce20044d0317649ddbb4e54dab3c1bcc7483c78c27d3f58ab3d0c7e6bc60d26a \
|
||||
--hash=sha256:d1071414dd06ca2eafa90c85a079169bfeb0e5f57fd0b45d44c092546fcd6fd9 \
|
||||
--hash=sha256:d3be11ac43ab1a3e979dac80843b42226d5d3cccd3986f2e03152720a4297cd7 \
|
||||
--hash=sha256:db603a1c235d110c860d5f39988ebc8218ee028f07a7cbc056ba6424372ca31b
|
||||
pip==19.0.1 \
|
||||
--hash=sha256:aae79c7afe895fb986ec751564f24d97df1331bb99cdfec6f70dada2f40c0044 \
|
||||
--hash=sha256:e81ddd35e361b630e94abeda4a1eddd36d47a90e71eb00f38f46b57f787cd1a5
|
||||
protobuf==3.6.1 \
|
||||
--hash=sha256:10394a4d03af7060fa8a6e1cbf38cea44be1467053b0aea5bbfcb4b13c4b88c4 \
|
||||
--hash=sha256:1489b376b0f364bcc6f89519718c057eb191d7ad6f1b395ffd93d1aa45587811 \
|
||||
--hash=sha256:1931d8efce896981fe410c802fd66df14f9f429c32a72dd9cfeeac9815ec6444 \
|
||||
--hash=sha256:196d3a80f93c537f27d2a19a4fafb826fb4c331b0b99110f985119391d170f96 \
|
||||
--hash=sha256:46e34fdcc2b1f2620172d3a4885128705a4e658b9b62355ae5e98f9ea19f42c2 \
|
||||
--hash=sha256:4b92e235a3afd42e7493b281c8b80c0c65cbef45de30f43d571d1ee40a1f77ef \
|
||||
--hash=sha256:574085a33ca0d2c67433e5f3e9a0965c487410d6cb3406c83bdaf549bfc2992e \
|
||||
--hash=sha256:59cd75ded98094d3cf2d79e84cdb38a46e33e7441b2826f3838dcc7c07f82995 \
|
||||
--hash=sha256:5ee0522eed6680bb5bac5b6d738f7b0923b3cafce8c4b1a039a6107f0841d7ed \
|
||||
--hash=sha256:65917cfd5da9dfc993d5684643063318a2e875f798047911a9dd71ca066641c9 \
|
||||
--hash=sha256:685bc4ec61a50f7360c9fd18e277b65db90105adbf9c79938bd315435e526b90 \
|
||||
--hash=sha256:92e8418976e52201364a3174e40dc31f5fd8c147186d72380cbda54e0464ee19 \
|
||||
--hash=sha256:9335f79d1940dfb9bcaf8ec881fb8ab47d7a2c721fb8b02949aab8bbf8b68625 \
|
||||
--hash=sha256:a7ee3bb6de78185e5411487bef8bc1c59ebd97e47713cba3c460ef44e99b3db9 \
|
||||
--hash=sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e \
|
||||
--hash=sha256:e7a5ccf56444211d79e3204b05087c1460c212a2c7d62f948b996660d0165d68 \
|
||||
--hash=sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10
|
||||
idna==2.7 \
|
||||
--hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \
|
||||
--hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16
|
||||
jsonrpclib-pelix==0.3.1 \
|
||||
--hash=sha256:5417b1508d5a50ec64f6e5b88907f111155d52607b218ff3ba9a777afb2e49e3 \
|
||||
--hash=sha256:bd89a6093bc4d47dc8a096197aacb827359944a4533be5193f3845f57b9f91b4
|
||||
pbkdf2==1.3 \
|
||||
--hash=sha256:ac6397369f128212c43064a2b4878038dab78dab41875364554aaf2a684e6979
|
||||
pip==10.0.1 \
|
||||
--hash=sha256:717cdffb2833be8409433a93746744b59505f42146e8d37de6c62b430e25d6d7 \
|
||||
--hash=sha256:f2bd08e0cd1b06e10218feaf6fef299f473ba706582eb3bd9d52203fdbd7ee68
|
||||
protobuf==3.6.0 \
|
||||
--hash=sha256:12985d9f40c104da2f44ec089449214876809b40fdc5d9e43b93b512b9e74056 \
|
||||
--hash=sha256:12c97fe27af12fc5d66b23f905ab09dd4fb0c68d5a74a419d914580e6d2e71e3 \
|
||||
--hash=sha256:327fb9d8a8247bc780b9ea7ed03c0643bc0d22c139b761c9ec1efc7cc3f0923e \
|
||||
--hash=sha256:3895319db04c0b3baed74fb66be7ba9f4cd8e88a432b8e71032cdf08b2dfee23 \
|
||||
--hash=sha256:695072063e256d32335d48b9484451f7c7948edc3dbd419469d6a778602682fc \
|
||||
--hash=sha256:7d786f3ef5b33a04e6538089674f244a3b0f588155016559d950989010af97d0 \
|
||||
--hash=sha256:8bf82bb7a466a54be7272dcb492f71d55a2453a58d862fb74c3f2083f2768543 \
|
||||
--hash=sha256:9bbc1ae1c33c1bd3a2fc05a3aec328544d2b039ff0ce6f000063628a32fad777 \
|
||||
--hash=sha256:9e992c68103ab5635728d29fcf132c669cb4e2db24d012685210276185009d17 \
|
||||
--hash=sha256:9f1087abb67b34e55108bc610936b34363a7aac692023bcbb17e065c253a1f80 \
|
||||
--hash=sha256:9fefcb92a3784b446abf3641d9a14dad815bee88e0edd10b9a9e0e144d01a991 \
|
||||
--hash=sha256:a37836aa47d1b81c2db1a6b7a5e79926062b5d76bd962115a0e615551be2b48d \
|
||||
--hash=sha256:cca22955443c55cf86f963a4ad7057bca95e4dcde84d6a493066d380cfab3bb0 \
|
||||
--hash=sha256:d7ac50bc06d31deb07ace6de85556c1d7330e5c0958f3b2af85037d6d1182abf \
|
||||
--hash=sha256:dfe6899304b898538f4dc94fa0b281b56b70e40f58afa4c6f807805261cbe2e8
|
||||
pyaes==1.6.1 \
|
||||
--hash=sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f
|
||||
QDarkStyle==2.5.4 \
|
||||
--hash=sha256:3eb60922b8c4d9cedecb6897ca4c9f8a259d81bdefe5791976ccdf12432de1f0 \
|
||||
--hash=sha256:51331fc6490b38c376e6ba8d8c814320c8d2d1c2663055bc396321a7c28fa8be
|
||||
qrcode==6.1 \
|
||||
--hash=sha256:3996ee560fc39532910603704c82980ff6d4d5d629f9c3f25f34174ce8606cf5 \
|
||||
--hash=sha256:505253854f607f2abf4d16092c61d4e9d511a3b4392e60bff957a68592b04369
|
||||
setuptools==40.8.0 \
|
||||
--hash=sha256:6e4eec90337e849ade7103723b9a99631c1f0d19990d6e8412dc42f5ae8b304d \
|
||||
--hash=sha256:e8496c0079f3ac30052ffe69b679bd876c5265686127a3159cfa415669b7f9ab
|
||||
six==1.12.0 \
|
||||
--hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
|
||||
--hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73
|
||||
typing-extensions==3.7.2 \
|
||||
--hash=sha256:07b2c978670896022a43c4b915df8958bec4a6b84add7f2c87b2b728bda3ba64 \
|
||||
--hash=sha256:f3f0e67e1d42de47b5c67c32c9b26641642e9170fe7e292991793705cd5fef7c \
|
||||
--hash=sha256:fb2cd053238d33a8ec939190f30cfd736c00653a85a2919415cecf7dc3d9da71
|
||||
wheel==0.32.3 \
|
||||
--hash=sha256:029703bf514e16c8271c3821806a1c171220cc5bdd325cbf4e7da1e056a01db6 \
|
||||
--hash=sha256:1e53cdb3f808d5ccd0df57f964263752aa74ea7359526d3da6c02114ec1e1d44
|
||||
yarl==1.3.0 \
|
||||
--hash=sha256:024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9 \
|
||||
--hash=sha256:2f3010703295fbe1aec51023740871e64bb9664c789cba5a6bdf404e93f7568f \
|
||||
--hash=sha256:3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb \
|
||||
--hash=sha256:3e2724eb9af5dc41648e5bb304fcf4891adc33258c6e14e2a7414ea32541e320 \
|
||||
--hash=sha256:5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842 \
|
||||
--hash=sha256:73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0 \
|
||||
--hash=sha256:7ab825726f2940c16d92aaec7d204cfc34ac26c0040da727cf8ba87255a33829 \
|
||||
--hash=sha256:b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310 \
|
||||
--hash=sha256:c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4 \
|
||||
--hash=sha256:c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8 \
|
||||
--hash=sha256:e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1
|
||||
colorama==0.4.1 \
|
||||
--hash=sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d \
|
||||
--hash=sha256:f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48
|
||||
pylibscrypt==1.7.1 \
|
||||
--hash=sha256:7aa9424e211a12106c67ea884ccfe609856289372b900d3702faaf66e87f79ac
|
||||
scrypt==0.8.6 \
|
||||
--hash=sha256:85919f023148cd9fb01d75ad4e3e061928c298fa6249a0cd6cd469c4b947595e \
|
||||
--hash=sha256:4ad7188f2e42dbee2ff1cd72e3da40b170ba41847effbf0d726444f62ae60f3a \
|
||||
--hash=sha256:bc131f74a688fa09993c518ca666a2ebd4268b207e039cbab03a034228140d3e \
|
||||
--hash=sha256:232acdbc3434d2de55def8d5dbf1bc4b9bfc50da7c5741df2a6eebc4e18d3720 \
|
||||
--hash=sha256:971db040d3963ebe4b919a203fe10d7d6659951d3644066314330983dc175ed4 \
|
||||
--hash=sha256:475ac80239b3d788ae71a09c3019ca915e149aaa339adcdd1c9eef121293dc88 \
|
||||
--hash=sha256:18ccbc63d87c6f89b753194194bb37aeaf1abc517e4b989461d115c1d93ce128 \
|
||||
--hash=sha256:c23daecee405cb036845917295c76f8d747fc890158df40cb304b4b3c3640079 \
|
||||
--hash=sha256:f8239b2d47fa1d40bc27efd231dc7083695d10c1c2ac51a99380360741e0362d
|
||||
|
||||
PySocks==1.6.8 \
|
||||
--hash=sha256:3fe52c55890a248676fd69dc9e3c4e811718b777834bcaab7a8125cf9deac672
|
||||
qrcode==6.0 \
|
||||
--hash=sha256:037b0db4c93f44586e37f84c3da3f763874fcac85b2974a69a98e399ac78e1bf \
|
||||
--hash=sha256:de4ffc15065e6ff20a551ad32b6b41264f3c75275675406ddfa8e3530d154be3
|
||||
requests==2.19.1 \
|
||||
--hash=sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1 \
|
||||
--hash=sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a
|
||||
setuptools==39.2.0 \
|
||||
--hash=sha256:8fca9275c89964f13da985c3656cb00ba029d7f3916b37990927ffdf264e7926 \
|
||||
--hash=sha256:f7cddbb5f5c640311eb00eab6e849f7701fa70bf6a183fc8a2c33dd1d1672fb2
|
||||
six==1.11.0 \
|
||||
--hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \
|
||||
--hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb
|
||||
urllib3==1.23 \
|
||||
--hash=sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf \
|
||||
--hash=sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5
|
||||
wheel==0.31.1 \
|
||||
--hash=sha256:0a2e54558a0628f2145d2fc822137e322412115173e8a2ddbe1c9024338ae83c \
|
||||
--hash=sha256:80044e51ec5bbf6c894ba0bc48d26a8c20a9ba629f4ca19ea26ecfcf87685f5f
|
||||
colorama==0.3.9 \
|
||||
--hash=sha256:463f8483208e921368c9f306094eb6f725c6ca42b0f97e313cb5d5512459feda \
|
||||
--hash=sha256:48eb22f4f8461b1df5734a074b57042430fb06e1d61bd1e11b078c0fe6d7a1f1
|
||||
|
||||
@ -1,8 +1,6 @@
|
||||
#!/bin/bash
|
||||
# Run this after a new release to update dependencies
|
||||
|
||||
set -e
|
||||
|
||||
venv_dir=~/.electrum-venv
|
||||
contrib=$(dirname "$0")
|
||||
|
||||
|
||||
@ -1,25 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
CONTRIB="$(dirname "$(readlink -e "$0")")"
|
||||
ROOT_FOLDER="$CONTRIB"/..
|
||||
PACKAGES="$ROOT_FOLDER"/packages/
|
||||
LOCALE="$ROOT_FOLDER"/electrum/locale/
|
||||
|
||||
if [ ! -d "$LOCALE" ]; then
|
||||
echo "Run make_locale first!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d "$PACKAGES" ]; then
|
||||
echo "Run make_packages first!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
pushd ./electrum/gui/kivy/
|
||||
|
||||
make theming
|
||||
pushd ./gui/kivy/
|
||||
|
||||
if [[ -n "$1" && "$1" == "release" ]] ; then
|
||||
echo -n Keystore Password:
|
||||
|
||||
@ -24,7 +24,6 @@ string = string.replace("##VERSION_APK##", APK_VERSION)
|
||||
|
||||
files = {
|
||||
'tgz': "Electrum-%s.tar.gz" % version,
|
||||
'appimage': "electrum-%s-x86_64.AppImage" % version,
|
||||
'zip': "Electrum-%s.zip" % version,
|
||||
'mac': "electrum-%s.dmg" % version_mac,
|
||||
'win': "electrum-%s.exe" % version_win,
|
||||
|
||||
@ -3,17 +3,13 @@ import os
|
||||
import subprocess
|
||||
import io
|
||||
import zipfile
|
||||
import sys
|
||||
|
||||
try:
|
||||
import requests
|
||||
except ImportError as e:
|
||||
sys.exit(f"Error: {str(e)}. Try 'sudo python3 -m pip install <module-name>'")
|
||||
import requests
|
||||
|
||||
os.chdir(os.path.dirname(os.path.realpath(__file__)))
|
||||
os.chdir('..')
|
||||
|
||||
cmd = "find electrum -type f -name '*.py' -o -name '*.kv'"
|
||||
code_directories = 'gui plugins lib'
|
||||
cmd = "find {} -type f -name '*.py' -o -name '*.kv'".format(code_directories)
|
||||
|
||||
files = subprocess.check_output(cmd, shell=True)
|
||||
|
||||
@ -23,13 +19,13 @@ with open("app.fil", "wb") as f:
|
||||
print("Found {} files to translate".format(len(files.splitlines())))
|
||||
|
||||
# Generate fresh translation template
|
||||
if not os.path.exists('electrum/locale'):
|
||||
os.mkdir('electrum/locale')
|
||||
cmd = 'xgettext -s --from-code UTF-8 --language Python --no-wrap -f app.fil --output=electrum/locale/messages.pot'
|
||||
if not os.path.exists('lib/locale'):
|
||||
os.mkdir('lib/locale')
|
||||
cmd = 'xgettext -s --from-code UTF-8 --language Python --no-wrap -f app.fil --output=lib/locale/messages.pot'
|
||||
print('Generate template')
|
||||
os.system(cmd)
|
||||
|
||||
os.chdir('electrum')
|
||||
os.chdir('lib')
|
||||
|
||||
crowdin_identifier = 'electrum'
|
||||
crowdin_file_name = 'files[electrum-client/messages.pot]'
|
||||
@ -59,7 +55,7 @@ if crowdin_api_key:
|
||||
|
||||
# Download & unzip
|
||||
print('Download translations')
|
||||
s = requests.request('GET', 'https://crowdin.com/backend/download/project/' + crowdin_identifier + '.zip').content
|
||||
s = requests.request('GET', 'https://crowdin.com/download/project/' + crowdin_identifier + '.zip').content
|
||||
zfobj = zipfile.ZipFile(io.BytesIO(s))
|
||||
|
||||
print('Unzip translations')
|
||||
|
||||
@ -1,10 +1,13 @@
|
||||
#!/bin/bash
|
||||
|
||||
CONTRIB="$(dirname "$0")"
|
||||
test -n "$CONTRIB" -a -d "$CONTRIB" || exit
|
||||
contrib=$(dirname "$0")
|
||||
test -n "$contrib" -a -d "$contrib" || exit
|
||||
|
||||
rm "$CONTRIB"/../packages/ -r
|
||||
whereis pip3
|
||||
if [ $? -ne 0 ] ; then echo "Install pip3" ; exit ; fi
|
||||
|
||||
rm "$contrib"/../packages/ -r
|
||||
|
||||
#Install pure python modules in electrum directory
|
||||
python3 -m pip install -r "$CONTRIB"/deterministic-build/requirements.txt -t "$CONTRIB"/../packages
|
||||
pip3 install -r $contrib/deterministic-build/requirements.txt -t $contrib/../packages
|
||||
|
||||
|
||||
@ -1,31 +1 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
CONTRIB="$(dirname "$(readlink -e "$0")")"
|
||||
ROOT_FOLDER="$CONTRIB"/..
|
||||
PACKAGES="$ROOT_FOLDER"/packages/
|
||||
LOCALE="$ROOT_FOLDER"/electrum/locale/
|
||||
|
||||
if [ ! -d "$LOCALE" ]; then
|
||||
echo "Run make_locale first!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d "$PACKAGES" ]; then
|
||||
echo "Run make_packages first!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
(
|
||||
cd "$ROOT_FOLDER"
|
||||
|
||||
echo "'git clean -fx' would delete the following files: >>>"
|
||||
git clean -fx --dry-run
|
||||
echo "<<<"
|
||||
|
||||
# we could build the kivy atlas potentially?
|
||||
#(cd electrum/gui/kivy/; make theming) || echo "building kivy atlas failed! skipping."
|
||||
|
||||
python3 setup.py --quiet sdist --format=zip,gztar
|
||||
)
|
||||
python3 setup.py sdist --format=zip,gztar
|
||||
|
||||
@ -1 +0,0 @@
|
||||
Subproject commit 59dfc03272751cd29ee311456fa34c40f7ebb7c0
|
||||
@ -1,66 +0,0 @@
|
||||
Building Mac OS binaries
|
||||
========================
|
||||
|
||||
This guide explains how to build Electrum binaries for macOS systems.
|
||||
|
||||
|
||||
## 1. Building the binary
|
||||
|
||||
This needs to be done on a system running macOS or OS X. We use El Capitan (10.11.6) as building it
|
||||
on High Sierra (or later)
|
||||
makes the binaries [incompatible with older versions](https://github.com/pyinstaller/pyinstaller/issues/1191).
|
||||
|
||||
Before starting, make sure that the Xcode command line tools are installed (e.g. you have `git`).
|
||||
|
||||
#### 1.1a Get Xcode
|
||||
|
||||
Building the QR scanner (CalinsQRReader) requires full Xcode (not just command line tools).
|
||||
|
||||
The last Xcode version compatible with El Capitan is Xcode 8.2.1
|
||||
|
||||
Get it from [here](https://developer.apple.com/download/more/).
|
||||
|
||||
Unfortunately, you need an "Apple ID" account.
|
||||
|
||||
After downloading, uncompress it.
|
||||
|
||||
Make sure it is the "selected" xcode (e.g.):
|
||||
|
||||
sudo xcode-select -s $HOME/Downloads/Xcode.app/Contents/Developer/
|
||||
|
||||
#### 1.1b Build QR scanner separately on newer Mac
|
||||
|
||||
Alternatively, you can try building just the QR scanner on newer macOS.
|
||||
|
||||
On newer Mac, run:
|
||||
|
||||
pushd contrib/osx/CalinsQRReader; xcodebuild; popd
|
||||
cp -r contrib/osx/CalinsQRReader/build prebuilt_qr
|
||||
|
||||
Move `prebuilt_qr` to El Capitan: `contrib/osx/CalinsQRReader/prebuilt_qr`.
|
||||
|
||||
|
||||
#### 1.2 Build Electrum
|
||||
|
||||
cd electrum
|
||||
./contrib/osx/make_osx
|
||||
|
||||
This creates both a folder named Electrum.app and the .dmg file.
|
||||
|
||||
|
||||
## 2. Building the image deterministically (WIP)
|
||||
The usual way to distribute macOS applications is to use image files containing the
|
||||
application. Although these images can be created on a Mac with the built-in `hdiutil`,
|
||||
they are not deterministic.
|
||||
|
||||
Instead, we use the toolchain that Bitcoin uses: genisoimage and libdmg-hfsplus.
|
||||
These tools do not work on macOS, so you need a separate Linux machine (or VM).
|
||||
|
||||
Copy the Electrum.app directory over and install the dependencies, e.g.:
|
||||
|
||||
apt install libcap-dev cmake make gcc faketime
|
||||
|
||||
Then you can just invoke `package.sh` with the path to the app:
|
||||
|
||||
cd electrum
|
||||
./contrib/osx/package.sh ~/Electrum.app/
|
||||
@ -1,23 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
. $(dirname "$0")/../build_tools_util.sh
|
||||
|
||||
|
||||
function DoCodeSignMaybe { # ARGS: infoName fileOrDirName codesignIdentity
|
||||
infoName="$1"
|
||||
file="$2"
|
||||
identity="$3"
|
||||
deep=""
|
||||
if [ -z "$identity" ]; then
|
||||
# we are ok with them not passing anything; master script calls us unconditionally even if no identity is specified
|
||||
return
|
||||
fi
|
||||
if [ -d "$file" ]; then
|
||||
deep="--deep"
|
||||
fi
|
||||
if [ -z "$infoName" ] || [ -z "$file" ] || [ -z "$identity" ] || [ ! -e "$file" ]; then
|
||||
fail "Argument error to internal function DoCodeSignMaybe()"
|
||||
fi
|
||||
info "Code signing ${infoName}..."
|
||||
codesign -f -v $deep -s "$identity" "$file" || fail "Could not code sign ${infoName}"
|
||||
}
|
||||
@ -1,143 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Parameterize
|
||||
PYTHON_VERSION=3.6.4
|
||||
BUILDDIR=/tmp/electrum-build
|
||||
PACKAGE=Electrum
|
||||
GIT_REPO=https://github.com/spesmilo/electrum
|
||||
LIBSECP_VERSION="b408c6a8b287003d1ade5709e6f7bc3c7f1d5be7"
|
||||
|
||||
. $(dirname "$0")/base.sh
|
||||
|
||||
src_dir=$(dirname "$0")
|
||||
cd $src_dir/../..
|
||||
|
||||
export PYTHONHASHSEED=22
|
||||
VERSION=`git describe --tags --dirty --always`
|
||||
|
||||
which brew > /dev/null 2>&1 || fail "Please install brew from https://brew.sh/ to continue"
|
||||
which xcodebuild > /dev/null 2>&1 || fail "Please install Xcode and xcode command line tools to continue"
|
||||
|
||||
# Code Signing: See https://developer.apple.com/library/archive/documentation/Security/Conceptual/CodeSigningGuide/Procedures/Procedures.html
|
||||
APP_SIGN=""
|
||||
if [ -n "$1" ]; then
|
||||
# Test the identity is valid for signing by doing this hack. There is no other way to do this.
|
||||
cp -f /bin/ls ./CODESIGN_TEST
|
||||
codesign -s "$1" --dryrun -f ./CODESIGN_TEST > /dev/null 2>&1
|
||||
res=$?
|
||||
rm -f ./CODESIGN_TEST
|
||||
if ((res)); then
|
||||
fail "Code signing identity \"$1\" appears to be invalid."
|
||||
fi
|
||||
unset res
|
||||
APP_SIGN="$1"
|
||||
info "Code signing enabled using identity \"$APP_SIGN\""
|
||||
else
|
||||
warn "Code signing DISABLED. Specify a valid macOS Developer identity installed on the system as the first argument to this script to enable signing."
|
||||
fi
|
||||
|
||||
info "Installing Python $PYTHON_VERSION"
|
||||
export PATH="~/.pyenv/bin:~/.pyenv/shims:~/Library/Python/3.6/bin:$PATH"
|
||||
if [ -d "~/.pyenv" ]; then
|
||||
pyenv update
|
||||
else
|
||||
curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer | bash > /dev/null 2>&1
|
||||
fi
|
||||
PYTHON_CONFIGURE_OPTS="--enable-framework" pyenv install -s $PYTHON_VERSION && \
|
||||
pyenv global $PYTHON_VERSION || \
|
||||
fail "Unable to use Python $PYTHON_VERSION"
|
||||
|
||||
|
||||
info "install dependencies specific to binaries"
|
||||
# note that this also installs pinned versions of both pip and setuptools
|
||||
python3 -m pip install -Ir ./contrib/deterministic-build/requirements-binaries.txt --user \
|
||||
|| fail "Could not install pyinstaller"
|
||||
|
||||
|
||||
info "Installing pyinstaller"
|
||||
python3 -m pip install -I --user pyinstaller==3.4 --no-use-pep517 || fail "Could not install pyinstaller"
|
||||
|
||||
info "Using these versions for building $PACKAGE:"
|
||||
sw_vers
|
||||
python3 --version
|
||||
echo -n "Pyinstaller "
|
||||
pyinstaller --version
|
||||
|
||||
rm -rf ./dist
|
||||
|
||||
git submodule init
|
||||
git submodule update
|
||||
|
||||
rm -rf $BUILDDIR > /dev/null 2>&1
|
||||
mkdir $BUILDDIR
|
||||
|
||||
cp -R ./contrib/deterministic-build/electrum-locale/locale/ ./electrum/locale/
|
||||
|
||||
|
||||
info "Downloading libusb..."
|
||||
curl https://homebrew.bintray.com/bottles/libusb-1.0.22.el_capitan.bottle.tar.gz | \
|
||||
tar xz --directory $BUILDDIR
|
||||
cp $BUILDDIR/libusb/1.0.22/lib/libusb-1.0.dylib contrib/osx
|
||||
echo "82c368dfd4da017ceb32b12ca885576f325503428a4966cc09302cbd62702493 contrib/osx/libusb-1.0.dylib" | \
|
||||
shasum -a 256 -c || fail "libusb checksum mismatched"
|
||||
|
||||
info "Building libsecp256k1"
|
||||
brew install autoconf automake libtool
|
||||
git clone https://github.com/bitcoin-core/secp256k1 $BUILDDIR/secp256k1
|
||||
pushd $BUILDDIR/secp256k1
|
||||
git reset --hard $LIBSECP_VERSION
|
||||
git clean -f -x -q
|
||||
./autogen.sh
|
||||
./configure --enable-module-recovery --enable-experimental --enable-module-ecdh --disable-jni
|
||||
make
|
||||
popd
|
||||
cp $BUILDDIR/secp256k1/.libs/libsecp256k1.0.dylib contrib/osx
|
||||
|
||||
info "Building CalinsQRReader..."
|
||||
d=contrib/osx/CalinsQRReader
|
||||
pushd $d
|
||||
rm -fr build
|
||||
# prefer building using xcode ourselves. otherwise fallback to prebuilt binary
|
||||
xcodebuild || cp -r prebuilt_qr build || fail "Could not build CalinsQRReader"
|
||||
popd
|
||||
DoCodeSignMaybe "CalinsQRReader.app" "${d}/build/Release/CalinsQRReader.app" "$APP_SIGN" # If APP_SIGN is empty will be a noop
|
||||
|
||||
|
||||
info "Installing requirements..."
|
||||
python3 -m pip install -Ir ./contrib/deterministic-build/requirements.txt --user || \
|
||||
fail "Could not install requirements"
|
||||
|
||||
info "Installing hardware wallet requirements..."
|
||||
python3 -m pip install -Ir ./contrib/deterministic-build/requirements-hw.txt --user || \
|
||||
fail "Could not install hardware wallet requirements"
|
||||
|
||||
info "Building $PACKAGE..."
|
||||
python3 -m pip install --user . > /dev/null || fail "Could not build $PACKAGE"
|
||||
|
||||
info "Faking timestamps..."
|
||||
for d in ~/Library/Python/ ~/.pyenv .; do
|
||||
pushd $d
|
||||
find . -exec touch -t '200101220000' {} +
|
||||
popd
|
||||
done
|
||||
|
||||
info "Building binary"
|
||||
APP_SIGN="$APP_SIGN" pyinstaller --noconfirm --ascii --clean --name $VERSION contrib/osx/osx.spec || fail "Could not build binary"
|
||||
|
||||
info "Adding bitcoin URI types to Info.plist"
|
||||
plutil -insert 'CFBundleURLTypes' \
|
||||
-xml '<array><dict> <key>CFBundleURLName</key> <string>bitcoin</string> <key>CFBundleURLSchemes</key> <array><string>bitcoin</string></array> </dict></array>' \
|
||||
-- dist/$PACKAGE.app/Contents/Info.plist \
|
||||
|| fail "Could not add keys to Info.plist. Make sure the program 'plutil' exists and is installed."
|
||||
|
||||
DoCodeSignMaybe "app bundle" "dist/${PACKAGE}.app" "$APP_SIGN" # If APP_SIGN is empty will be a noop
|
||||
|
||||
info "Creating .DMG"
|
||||
hdiutil create -fs HFS+ -volname $PACKAGE -srcfolder dist/$PACKAGE.app dist/electrum-$VERSION.dmg || fail "Could not create .DMG"
|
||||
|
||||
DoCodeSignMaybe ".DMG" "dist/electrum-${VERSION}.dmg" "$APP_SIGN" # If APP_SIGN is empty will be a noop
|
||||
|
||||
if [ -z "$APP_SIGN" ]; then
|
||||
warn "App was built successfully but was not code signed. Users may get security warnings from macOS."
|
||||
warn "Specify a valid code signing identity as the first argument to this script to enable code signing."
|
||||
fi
|
||||
@ -1,162 +0,0 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
from PyInstaller.utils.hooks import collect_data_files, collect_submodules, collect_dynamic_libs
|
||||
|
||||
import sys, os
|
||||
|
||||
PACKAGE='Electrum'
|
||||
PYPKG='electrum'
|
||||
MAIN_SCRIPT='run_electrum'
|
||||
ICONS_FILE=PYPKG + '/gui/icons/electrum.icns'
|
||||
APP_SIGN = os.environ.get('APP_SIGN', '')
|
||||
|
||||
def fail(*msg):
|
||||
RED='\033[0;31m'
|
||||
NC='\033[0m' # No Color
|
||||
print("\r🗯 {}ERROR:{}".format(RED, NC), *msg)
|
||||
sys.exit(1)
|
||||
|
||||
def codesign(identity, binary):
|
||||
d = os.path.dirname(binary)
|
||||
saved_dir=None
|
||||
if d:
|
||||
# switch to directory of the binary so codesign verbose messages don't include long path
|
||||
saved_dir = os.path.abspath(os.path.curdir)
|
||||
os.chdir(d)
|
||||
binary = os.path.basename(binary)
|
||||
os.system("codesign -v -f -s '{}' '{}'".format(identity, binary))==0 or fail("Could not code sign " + binary)
|
||||
if saved_dir:
|
||||
os.chdir(saved_dir)
|
||||
|
||||
def monkey_patch_pyinstaller_for_codesigning(identity):
|
||||
# Monkey-patch PyInstaller so that we app-sign all binaries *after* they are modified by PyInstaller
|
||||
# If we app-sign before that point, the signature will be invalid because PyInstaller modifies
|
||||
# @loader_path in the Mach-O loader table.
|
||||
try:
|
||||
import PyInstaller.depend.dylib
|
||||
_saved_func = PyInstaller.depend.dylib.mac_set_relative_dylib_deps
|
||||
except (ImportError, NameError, AttributeError):
|
||||
# Hmm. Likely wrong PyInstaller version.
|
||||
fail("Could not monkey-patch PyInstaller for code signing. Please ensure that you are using PyInstaller 3.4.")
|
||||
_signed = set()
|
||||
def my_func(fn, distname):
|
||||
_saved_func(fn, distname)
|
||||
if (fn, distname) not in _signed:
|
||||
codesign(identity, fn)
|
||||
_signed.add((fn,distname)) # remember we signed it so we don't sign again
|
||||
PyInstaller.depend.dylib.mac_set_relative_dylib_deps = my_func
|
||||
|
||||
|
||||
for i, x in enumerate(sys.argv):
|
||||
if x == '--name':
|
||||
VERSION = sys.argv[i+1]
|
||||
break
|
||||
else:
|
||||
raise Exception('no version')
|
||||
|
||||
electrum = os.path.abspath(".") + "/"
|
||||
block_cipher = None
|
||||
|
||||
# see https://github.com/pyinstaller/pyinstaller/issues/2005
|
||||
hiddenimports = []
|
||||
hiddenimports += collect_submodules('trezorlib')
|
||||
hiddenimports += collect_submodules('safetlib')
|
||||
hiddenimports += collect_submodules('btchip')
|
||||
hiddenimports += collect_submodules('keepkeylib')
|
||||
hiddenimports += collect_submodules('websocket')
|
||||
hiddenimports += collect_submodules('ckcc')
|
||||
|
||||
# safetlib imports PyQt5.Qt. We use a local updated copy of pinmatrix.py until they
|
||||
# release a new version that includes https://github.com/archos-safe-t/python-safet/commit/b1eab3dba4c04fdfc1fcf17b66662c28c5f2380e
|
||||
hiddenimports.remove('safetlib.qt.pinmatrix')
|
||||
|
||||
|
||||
datas = [
|
||||
(electrum + PYPKG + '/*.json', PYPKG),
|
||||
(electrum + PYPKG + '/wordlist/english.txt', PYPKG + '/wordlist'),
|
||||
(electrum + PYPKG + '/locale', PYPKG + '/locale'),
|
||||
(electrum + PYPKG + '/plugins', PYPKG + '/plugins'),
|
||||
(electrum + PYPKG + '/gui/icons', PYPKG + '/gui/icons'),
|
||||
]
|
||||
datas += collect_data_files('trezorlib')
|
||||
datas += collect_data_files('safetlib')
|
||||
datas += collect_data_files('btchip')
|
||||
datas += collect_data_files('keepkeylib')
|
||||
datas += collect_data_files('ckcc')
|
||||
|
||||
# Add the QR Scanner helper app
|
||||
datas += [(electrum + "contrib/osx/CalinsQRReader/build/Release/CalinsQRReader.app", "./contrib/osx/CalinsQRReader/build/Release/CalinsQRReader.app")]
|
||||
|
||||
# Add libusb so Trezor and Safe-T mini will work
|
||||
binaries = [(electrum + "contrib/osx/libusb-1.0.dylib", ".")]
|
||||
binaries += [(electrum + "contrib/osx/libsecp256k1.0.dylib", ".")]
|
||||
|
||||
# Workaround for "Retro Look":
|
||||
binaries += [b for b in collect_dynamic_libs('PyQt5') if 'macstyle' in b[0]]
|
||||
|
||||
# We don't put these files in to actually include them in the script but to make the Analysis method scan them for imports
|
||||
a = Analysis([electrum+ MAIN_SCRIPT,
|
||||
electrum+'electrum/gui/qt/main_window.py',
|
||||
electrum+'electrum/gui/text.py',
|
||||
electrum+'electrum/util.py',
|
||||
electrum+'electrum/wallet.py',
|
||||
electrum+'electrum/simple_config.py',
|
||||
electrum+'electrum/bitcoin.py',
|
||||
electrum+'electrum/dnssec.py',
|
||||
electrum+'electrum/commands.py',
|
||||
electrum+'electrum/plugins/cosigner_pool/qt.py',
|
||||
electrum+'electrum/plugins/email_requests/qt.py',
|
||||
electrum+'electrum/plugins/trezor/qt.py',
|
||||
electrum+'electrum/plugins/safe_t/client.py',
|
||||
electrum+'electrum/plugins/safe_t/qt.py',
|
||||
electrum+'electrum/plugins/keepkey/qt.py',
|
||||
electrum+'electrum/plugins/ledger/qt.py',
|
||||
electrum+'electrum/plugins/coldcard/qt.py',
|
||||
],
|
||||
binaries=binaries,
|
||||
datas=datas,
|
||||
hiddenimports=hiddenimports,
|
||||
hookspath=[])
|
||||
|
||||
# http://stackoverflow.com/questions/19055089/pyinstaller-onefile-warning-pyconfig-h-when-importing-scipy-or-scipy-signal
|
||||
for d in a.datas:
|
||||
if 'pyconfig' in d[0]:
|
||||
a.datas.remove(d)
|
||||
break
|
||||
|
||||
# Strip out parts of Qt that we never use. Reduces binary size by tens of MBs. see #4815
|
||||
qt_bins2remove=('qtweb', 'qt3d', 'qtgame', 'qtdesigner', 'qtquick', 'qtlocation', 'qttest', 'qtxml')
|
||||
print("Removing Qt binaries:", *qt_bins2remove)
|
||||
for x in a.binaries.copy():
|
||||
for r in qt_bins2remove:
|
||||
if x[0].lower().startswith(r):
|
||||
a.binaries.remove(x)
|
||||
print('----> Removed x =', x)
|
||||
|
||||
# If code signing, monkey-patch in a code signing step to pyinstaller. See: https://github.com/spesmilo/electrum/issues/4994
|
||||
if APP_SIGN:
|
||||
monkey_patch_pyinstaller_for_codesigning(APP_SIGN)
|
||||
|
||||
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
|
||||
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.datas,
|
||||
name=PACKAGE,
|
||||
debug=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
icon=electrum+ICONS_FILE,
|
||||
console=False)
|
||||
|
||||
app = BUNDLE(exe,
|
||||
version = VERSION,
|
||||
name=PACKAGE + '.app',
|
||||
icon=electrum+ICONS_FILE,
|
||||
bundle_identifier=None,
|
||||
info_plist={
|
||||
'NSHighResolutionCapable': 'True',
|
||||
'NSSupportsAutomaticGraphicsSwitching': 'True'
|
||||
}
|
||||
)
|
||||
@ -1,8 +1,6 @@
|
||||
Cython>=0.27
|
||||
trezor[hidapi]>=0.11.1
|
||||
safet[hidapi]>=0.1.0
|
||||
trezor[hidapi]>=0.9.0
|
||||
keepkey
|
||||
btchip-python>=0.1.26
|
||||
ckcc-protocol>=0.7.2
|
||||
btchip-python
|
||||
websocket-client
|
||||
hidapi
|
||||
|
||||
@ -1,12 +1,10 @@
|
||||
pyaes>=0.1a1
|
||||
ecdsa>=0.9
|
||||
pbkdf2
|
||||
requests
|
||||
qrcode
|
||||
protobuf
|
||||
dnspython
|
||||
jsonrpclib-pelix
|
||||
qdarkstyle<2.6
|
||||
aiorpcx>=0.9,<0.11
|
||||
aiohttp>=3.3.0
|
||||
aiohttp_socks
|
||||
certifi
|
||||
pylibscrypt==1.7.1
|
||||
PySocks>=1.6.6
|
||||
qdarkstyle<3.0
|
||||
|
||||
@ -1,4 +0,0 @@
|
||||
#!/bin/bash
|
||||
version=`python3 -c "import electrum; print(electrum.version.ELECTRUM_VERSION)"`
|
||||
sig=`./run_electrum -w $SIGNING_WALLET signmessage $SIGNING_ADDRESS $version`
|
||||
echo "{ \"version\":\"$version\", \"signatures\":{ \"$SIGNING_ADDRESS\":\"$sig\"}}"
|
||||
@ -26,22 +26,13 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
MIN_PYTHON_VERSION = "3.6.1" # FIXME duplicated from setup.py
|
||||
_min_python_version_tuple = tuple(map(int, (MIN_PYTHON_VERSION.split("."))))
|
||||
|
||||
|
||||
if sys.version_info[:3] < _min_python_version_tuple:
|
||||
sys.exit("Error: Electrum requires Python version >= %s..." % MIN_PYTHON_VERSION)
|
||||
|
||||
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
is_bundle = getattr(sys, 'frozen', False)
|
||||
is_local = not is_bundle and os.path.exists(os.path.join(script_dir, "electrum.desktop"))
|
||||
is_android = 'ANDROID_DATA' in os.environ
|
||||
|
||||
# move this back to gui/kivy/__init.py once plugins are moved
|
||||
os.environ['KIVY_DATA_DIR'] = os.path.abspath(os.path.dirname(__file__)) + '/electrum/gui/kivy/data/'
|
||||
os.environ['KIVY_DATA_DIR'] = os.path.abspath(os.path.dirname(__file__)) + '/gui/kivy/data/'
|
||||
|
||||
if is_local or is_android:
|
||||
sys.path.insert(0, os.path.join(script_dir, 'packages'))
|
||||
@ -53,13 +44,13 @@ def check_imports():
|
||||
import dns
|
||||
import pyaes
|
||||
import ecdsa
|
||||
import certifi
|
||||
import requests
|
||||
import qrcode
|
||||
import pbkdf2
|
||||
import google.protobuf
|
||||
import jsonrpclib
|
||||
import aiorpcx
|
||||
except ImportError as e:
|
||||
sys.exit(f"Error: {str(e)}. Try 'sudo python3 -m pip install <module-name>'")
|
||||
sys.exit("Error: %s. Try 'sudo pip install <module-name>'"%str(e))
|
||||
# the following imports are for pyinstaller
|
||||
from google.protobuf import descriptor
|
||||
from google.protobuf import message
|
||||
@ -67,23 +58,31 @@ def check_imports():
|
||||
from google.protobuf import descriptor_pb2
|
||||
from jsonrpclib import SimpleJSONRPCServer
|
||||
# make sure that certificates are here
|
||||
assert os.path.exists(certifi.where())
|
||||
assert os.path.exists(requests.utils.DEFAULT_CA_BUNDLE_PATH)
|
||||
|
||||
|
||||
if not is_android:
|
||||
check_imports()
|
||||
|
||||
# load local module as electrum
|
||||
if is_local or is_android:
|
||||
import imp
|
||||
imp.load_module('electrum', *imp.find_module('lib'))
|
||||
imp.load_module('electrum_gui', *imp.find_module('gui'))
|
||||
|
||||
from electrum import util
|
||||
|
||||
|
||||
from electrum import bitcoin, util
|
||||
from electrum import constants
|
||||
from electrum import SimpleConfig
|
||||
from electrum.wallet import Wallet
|
||||
from electrum import SimpleConfig, Network
|
||||
from electrum.wallet import Wallet, Imported_Wallet
|
||||
from electrum.storage import WalletStorage, get_derivation_used_for_hw_device_encryption
|
||||
from electrum.util import print_msg, print_stderr, json_encode, json_decode, UserCancelled
|
||||
from electrum.util import set_verbosity, InvalidPassword
|
||||
from electrum.commands import get_parser, known_commands, Commands, config_variables
|
||||
from electrum import daemon
|
||||
from electrum import keystore
|
||||
from electrum.mnemonic import Mnemonic
|
||||
|
||||
# get password routine
|
||||
def prompt_password(prompt, confirm=True):
|
||||
@ -98,6 +97,80 @@ def prompt_password(prompt, confirm=True):
|
||||
return password
|
||||
|
||||
|
||||
|
||||
def run_non_RPC(config):
|
||||
cmdname = config.get('cmd')
|
||||
|
||||
storage = WalletStorage(config.get_wallet_path())
|
||||
if storage.file_exists():
|
||||
sys.exit("Error: Remove the existing wallet first!")
|
||||
|
||||
def password_dialog():
|
||||
return prompt_password("Password (hit return if you do not wish to encrypt your wallet):")
|
||||
|
||||
if cmdname == 'restore':
|
||||
text = config.get('text').strip()
|
||||
passphrase = config.get('passphrase', '')
|
||||
password = password_dialog() if keystore.is_private(text) else None
|
||||
if keystore.is_address_list(text):
|
||||
wallet = Imported_Wallet(storage)
|
||||
for x in text.split():
|
||||
wallet.import_address(x)
|
||||
elif keystore.is_private_key_list(text):
|
||||
k = keystore.Imported_KeyStore({})
|
||||
storage.put('keystore', k.dump())
|
||||
storage.put('use_encryption', bool(password))
|
||||
wallet = Imported_Wallet(storage)
|
||||
for x in text.split():
|
||||
wallet.import_private_key(x, password)
|
||||
storage.write()
|
||||
else:
|
||||
if keystore.is_seed(text):
|
||||
k = keystore.from_seed(text, passphrase, False)
|
||||
elif keystore.is_master_key(text):
|
||||
k = keystore.from_master_key(text)
|
||||
else:
|
||||
sys.exit("Error: Seed or key not recognized")
|
||||
if password:
|
||||
k.update_password(None, password)
|
||||
storage.put('keystore', k.dump())
|
||||
storage.put('wallet_type', 'standard')
|
||||
storage.put('use_encryption', bool(password))
|
||||
storage.write()
|
||||
wallet = Wallet(storage)
|
||||
if not config.get('offline'):
|
||||
network = Network(config)
|
||||
network.start()
|
||||
wallet.start_threads(network)
|
||||
print_msg("Recovering wallet...")
|
||||
wallet.synchronize()
|
||||
wallet.wait_until_synchronized()
|
||||
wallet.stop_threads()
|
||||
# note: we don't wait for SPV
|
||||
msg = "Recovery successful" if wallet.is_found() else "Found no history for this wallet"
|
||||
else:
|
||||
msg = "This wallet was restored offline. It may contain more addresses than displayed."
|
||||
print_msg(msg)
|
||||
|
||||
elif cmdname == 'create':
|
||||
password = password_dialog()
|
||||
passphrase = config.get('passphrase', '')
|
||||
seed_type = 'segwit' if config.get('segwit') else 'standard'
|
||||
seed = Mnemonic('en').make_seed(seed_type)
|
||||
k = keystore.from_seed(seed, passphrase, False)
|
||||
storage.put('keystore', k.dump())
|
||||
storage.put('wallet_type', 'standard')
|
||||
wallet = Wallet(storage)
|
||||
wallet.update_password(None, password, True)
|
||||
wallet.synchronize()
|
||||
print_msg("Your wallet generation seed is:\n\"%s\"" % seed)
|
||||
print_msg("Please keep it in a safe place; if you lose it, you will not be able to restore your wallet.")
|
||||
|
||||
wallet.storage.write()
|
||||
print_msg("Wallet saved in '%s'" % wallet.storage.path)
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def init_daemon(config_options):
|
||||
config = SimpleConfig(config_options)
|
||||
storage = WalletStorage(config.get_wallet_path())
|
||||
@ -166,28 +239,28 @@ def init_cmdline(config_options, server):
|
||||
else:
|
||||
password = None
|
||||
|
||||
config_options['password'] = config_options.get('password') or password
|
||||
config_options['password'] = password
|
||||
|
||||
if cmd.name == 'password':
|
||||
new_password = prompt_password('New password:')
|
||||
config_options['new_password'] = new_password
|
||||
|
||||
return cmd, password
|
||||
|
||||
|
||||
def get_connected_hw_devices(plugins):
|
||||
supported_plugins = plugins.get_hardware_support()
|
||||
support = plugins.get_hardware_support()
|
||||
if not support:
|
||||
print_msg('No hardware wallet support found on your system.')
|
||||
sys.exit(1)
|
||||
# scan devices
|
||||
devices = []
|
||||
devmgr = plugins.device_manager
|
||||
for splugin in supported_plugins:
|
||||
name, plugin = splugin.name, splugin.plugin
|
||||
if not plugin:
|
||||
e = splugin.exception
|
||||
print_stderr(f"{name}: error during plugin init: {repr(e)}")
|
||||
continue
|
||||
for name, description, plugin in support:
|
||||
try:
|
||||
u = devmgr.unpaired_device_infos(None, plugin)
|
||||
except:
|
||||
devmgr.print_error(f'error getting device infos for {name}: {e}')
|
||||
devmgr.print_error("error", name)
|
||||
continue
|
||||
devices += list(map(lambda x: (name, x), u))
|
||||
return devices
|
||||
@ -230,7 +303,7 @@ def run_offline_command(config, config_options, plugins):
|
||||
# check password
|
||||
if cmd.requires_password and wallet.has_password():
|
||||
try:
|
||||
wallet.check_password(password)
|
||||
seed = wallet.check_password(password)
|
||||
except InvalidPassword:
|
||||
print_msg("Error: This password does not decode this wallet.")
|
||||
sys.exit(1)
|
||||
@ -253,10 +326,9 @@ def run_offline_command(config, config_options, plugins):
|
||||
wallet.storage.write()
|
||||
return result
|
||||
|
||||
|
||||
def init_plugins(config, gui_name):
|
||||
from electrum.plugin import Plugins
|
||||
return Plugins(config, gui_name)
|
||||
from electrum.plugins import Plugins
|
||||
return Plugins(config, is_local or is_android, gui_name)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
@ -270,14 +342,6 @@ if __name__ == '__main__':
|
||||
sys.argv.remove('help')
|
||||
sys.argv.append('-h')
|
||||
|
||||
# old '-v' syntax
|
||||
try:
|
||||
i = sys.argv.index('-v')
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
sys.argv[i] = '-v*'
|
||||
|
||||
# read arguments from stdin pipe and prompt
|
||||
for i, arg in enumerate(sys.argv):
|
||||
if arg == '-':
|
||||
@ -298,7 +362,7 @@ if __name__ == '__main__':
|
||||
# config is an object passed to the various constructors (wallet, interface, gui)
|
||||
if is_android:
|
||||
config_options = {
|
||||
'verbosity': '',
|
||||
'verbose': True,
|
||||
'cmd': 'gui',
|
||||
'gui': 'kivy',
|
||||
}
|
||||
@ -319,7 +383,7 @@ if __name__ == '__main__':
|
||||
config_options['electrum_path'] = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'electrum_data')
|
||||
|
||||
# kivy sometimes freezes when we write to sys.stderr
|
||||
set_verbosity(config_options.get('verbosity') if config_options.get('gui') != 'kivy' else '')
|
||||
set_verbosity(config_options.get('verbose') and config_options.get('gui')!='kivy')
|
||||
|
||||
# check uri
|
||||
uri = config_options.get('url')
|
||||
@ -340,11 +404,17 @@ if __name__ == '__main__':
|
||||
elif config.get('simnet'):
|
||||
constants.set_simnet()
|
||||
|
||||
# run non-RPC commands separately
|
||||
if cmdname in ['create', 'restore']:
|
||||
run_non_RPC(config)
|
||||
sys.exit(0)
|
||||
|
||||
if cmdname == 'gui':
|
||||
fd, server = daemon.get_fd_or_server(config)
|
||||
if fd is not None:
|
||||
plugins = init_plugins(config, config.get('gui', 'qt'))
|
||||
d = daemon.Daemon(config, fd)
|
||||
d = daemon.Daemon(config, fd, True)
|
||||
d.start()
|
||||
d.init_gui(config, plugins)
|
||||
sys.exit(0)
|
||||
else:
|
||||
@ -364,10 +434,11 @@ if __name__ == '__main__':
|
||||
print_stderr("starting daemon (PID %d)" % pid)
|
||||
sys.exit(0)
|
||||
init_plugins(config, 'cmdline')
|
||||
d = daemon.Daemon(config, fd)
|
||||
d = daemon.Daemon(config, fd, False)
|
||||
d.start()
|
||||
if config.get('websocket_server'):
|
||||
from electrum import websockets
|
||||
websockets.WebSocketServer(config, d.network)
|
||||
websockets.WebSocketServer(config, d.network).start()
|
||||
if config.get('requests_dir'):
|
||||
path = os.path.join(config.get('requests_dir'), 'index.html')
|
||||
if not os.path.exists(path):
|
||||
@ -17,11 +17,11 @@ if [ -e ./env/bin/activate ]; then
|
||||
else
|
||||
virtualenv env -p `which python3`
|
||||
source ./env/bin/activate
|
||||
python3 -m pip install .[fast]
|
||||
python3 setup.py install
|
||||
fi
|
||||
|
||||
export PYTHONPATH="/usr/local/lib/python${PYTHON_VER}/site-packages:$PYTHONPATH"
|
||||
|
||||
./run_electrum "$@"
|
||||
./electrum "$@"
|
||||
|
||||
deactivate
|
||||
|
||||
16
electrum.conf.sample
Normal file
@ -0,0 +1,16 @@
|
||||
# Configuration file for the Electrum client
|
||||
# Settings defined here are shared across wallets
|
||||
#
|
||||
# copy this file to /etc/electrum.conf if you want read-only settings
|
||||
|
||||
[client]
|
||||
server = electrum.novit.ro:50001:t
|
||||
proxy = None
|
||||
gap_limit = 5
|
||||
# booleans use python syntax
|
||||
use_change = True
|
||||
gui = qt
|
||||
num_zeros = 2
|
||||
# default transaction fee is in Satoshis
|
||||
fee = 10000
|
||||
winpos-qt = [799, 226, 877, 435]
|
||||
@ -3,20 +3,19 @@
|
||||
|
||||
[Desktop Entry]
|
||||
Comment=Lightweight Bitcoin Client
|
||||
Exec=sh -c "PATH=\"\\$HOME/.local/bin:\\$PATH\"; electrum %u"
|
||||
Exec=sh -c "PATH=\"\\$HOME/.local/bin:\\$PATH\" electrum %u"
|
||||
GenericName[en_US]=Bitcoin Wallet
|
||||
GenericName=Bitcoin Wallet
|
||||
Icon=electrum
|
||||
Name[en_US]=Electrum Bitcoin Wallet
|
||||
Name=Electrum Bitcoin Wallet
|
||||
Categories=Finance;Network;
|
||||
StartupNotify=true
|
||||
StartupWMClass=electrum
|
||||
StartupNotify=false
|
||||
Terminal=false
|
||||
Type=Application
|
||||
MimeType=x-scheme-handler/bitcoin;
|
||||
Actions=Testnet;
|
||||
|
||||
[Desktop Action Testnet]
|
||||
Exec=sh -c "PATH=\"\\$HOME/.local/bin:\\$PATH\"; electrum --testnet %u"
|
||||
Exec=sh -c "PATH=\"\\$HOME/.local/bin:\\$PATH\" electrum --testnet %u"
|
||||
Name=Testnet mode
|
||||
|
||||
BIN
electrum.icns
Normal file
@ -1,877 +0,0 @@
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2018 The Electrum Developers
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import threading
|
||||
import asyncio
|
||||
import itertools
|
||||
from collections import defaultdict
|
||||
from typing import TYPE_CHECKING, Dict, Optional
|
||||
|
||||
from . import bitcoin
|
||||
from .bitcoin import COINBASE_MATURITY, TYPE_ADDRESS, TYPE_PUBKEY
|
||||
from .util import PrintError, profiler, bfh, TxMinedInfo
|
||||
from .transaction import Transaction, TxOutput
|
||||
from .synchronizer import Synchronizer
|
||||
from .verifier import SPV
|
||||
from .blockchain import hash_header
|
||||
from .i18n import _
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .storage import WalletStorage
|
||||
from .network import Network
|
||||
|
||||
|
||||
TX_HEIGHT_LOCAL = -2
|
||||
TX_HEIGHT_UNCONF_PARENT = -1
|
||||
TX_HEIGHT_UNCONFIRMED = 0
|
||||
|
||||
class AddTransactionException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class UnrelatedTransactionException(AddTransactionException):
|
||||
def __str__(self):
|
||||
return _("Transaction is unrelated to this wallet.")
|
||||
|
||||
|
||||
class AddressSynchronizer(PrintError):
|
||||
"""
|
||||
inherited by wallet
|
||||
"""
|
||||
|
||||
def __init__(self, storage: 'WalletStorage'):
|
||||
self.storage = storage
|
||||
self.network = None # type: Network
|
||||
# verifier (SPV) and synchronizer are started in start_network
|
||||
self.synchronizer = None # type: Synchronizer
|
||||
self.verifier = None # type: SPV
|
||||
# locks: if you need to take multiple ones, acquire them in the order they are defined here!
|
||||
self.lock = threading.RLock()
|
||||
self.transaction_lock = threading.RLock()
|
||||
# address -> list(txid, height)
|
||||
self.history = storage.get('addr_history',{})
|
||||
# Verified transactions. txid -> TxMinedInfo. Access with self.lock.
|
||||
verified_tx = storage.get('verified_tx3', {})
|
||||
self.verified_tx = {} # type: Dict[str, TxMinedInfo]
|
||||
for txid, (height, timestamp, txpos, header_hash, flodata) in verified_tx.items():
|
||||
self.verified_tx[txid] = TxMinedInfo(height=height,
|
||||
conf=None,
|
||||
timestamp=timestamp,
|
||||
txpos=txpos,
|
||||
header_hash=header_hash,
|
||||
flodata=flodata)
|
||||
# Transactions pending verification. txid -> tx_height. Access with self.lock.
|
||||
self.unverified_tx = defaultdict(int)
|
||||
# true when synchronized
|
||||
self.up_to_date = False
|
||||
# thread local storage for caching stuff
|
||||
self.threadlocal_cache = threading.local()
|
||||
|
||||
self.load_and_cleanup()
|
||||
|
||||
def with_transaction_lock(func):
|
||||
def func_wrapper(self, *args, **kwargs):
|
||||
with self.transaction_lock:
|
||||
return func(self, *args, **kwargs)
|
||||
return func_wrapper
|
||||
|
||||
def load_and_cleanup(self):
|
||||
self.load_transactions()
|
||||
self.load_local_history()
|
||||
self.check_history()
|
||||
self.load_unverified_transactions()
|
||||
self.remove_local_transactions_we_dont_have()
|
||||
|
||||
def is_mine(self, address):
|
||||
return address in self.history
|
||||
|
||||
def get_addresses(self):
|
||||
return sorted(self.history.keys())
|
||||
|
||||
def get_address_history(self, addr):
|
||||
h = []
|
||||
# we need self.transaction_lock but get_tx_height will take self.lock
|
||||
# so we need to take that too here, to enforce order of locks
|
||||
with self.lock, self.transaction_lock:
|
||||
related_txns = self._history_local.get(addr, set())
|
||||
for tx_hash in related_txns:
|
||||
tx_height = self.get_tx_height(tx_hash).height
|
||||
h.append((tx_hash, tx_height))
|
||||
return h
|
||||
|
||||
def get_address_history_len(self, addr: str) -> int:
|
||||
"""Return number of transactions where address is involved."""
|
||||
return len(self._history_local.get(addr, ()))
|
||||
|
||||
def get_txin_address(self, txi):
|
||||
addr = txi.get('address')
|
||||
if addr and addr != "(pubkey)":
|
||||
return addr
|
||||
prevout_hash = txi.get('prevout_hash')
|
||||
prevout_n = txi.get('prevout_n')
|
||||
dd = self.txo.get(prevout_hash, {})
|
||||
for addr, l in dd.items():
|
||||
for n, v, is_cb in l:
|
||||
if n == prevout_n:
|
||||
return addr
|
||||
return None
|
||||
|
||||
def get_txout_address(self, txo: TxOutput):
|
||||
if txo.type == TYPE_ADDRESS:
|
||||
addr = txo.address
|
||||
elif txo.type == TYPE_PUBKEY:
|
||||
addr = bitcoin.public_key_to_p2pkh(bfh(txo.address))
|
||||
else:
|
||||
addr = None
|
||||
return addr
|
||||
|
||||
def load_unverified_transactions(self):
|
||||
# review transactions that are in the history
|
||||
for addr, hist in self.history.items():
|
||||
for tx_hash, tx_height in hist:
|
||||
# add it in case it was previously unconfirmed
|
||||
self.add_unverified_tx(tx_hash, tx_height)
|
||||
|
||||
def start_network(self, network):
|
||||
self.network = network
|
||||
if self.network is not None:
|
||||
self.synchronizer = Synchronizer(self)
|
||||
self.verifier = SPV(self.network, self)
|
||||
|
||||
def stop_threads(self, write_to_disk=True):
|
||||
if self.network:
|
||||
if self.synchronizer:
|
||||
asyncio.run_coroutine_threadsafe(self.synchronizer.stop(), self.network.asyncio_loop)
|
||||
self.synchronizer = None
|
||||
if self.verifier:
|
||||
asyncio.run_coroutine_threadsafe(self.verifier.stop(), self.network.asyncio_loop)
|
||||
self.verifier = None
|
||||
self.storage.put('stored_height', self.get_local_height())
|
||||
if write_to_disk:
|
||||
self.save_transactions()
|
||||
self.save_verified_tx()
|
||||
self.storage.write()
|
||||
|
||||
def add_address(self, address):
|
||||
if address not in self.history:
|
||||
self.history[address] = []
|
||||
self.set_up_to_date(False)
|
||||
if self.synchronizer:
|
||||
self.synchronizer.add(address)
|
||||
|
||||
def get_conflicting_transactions(self, tx_hash, tx):
|
||||
"""Returns a set of transaction hashes from the wallet history that are
|
||||
directly conflicting with tx, i.e. they have common outpoints being
|
||||
spent with tx. If the tx is already in wallet history, that will not be
|
||||
reported as a conflict.
|
||||
"""
|
||||
conflicting_txns = set()
|
||||
with self.transaction_lock:
|
||||
for txin in tx.inputs():
|
||||
if txin['type'] == 'coinbase':
|
||||
continue
|
||||
prevout_hash = txin['prevout_hash']
|
||||
prevout_n = txin['prevout_n']
|
||||
spending_tx_hash = self.spent_outpoints[prevout_hash].get(prevout_n)
|
||||
if spending_tx_hash is None:
|
||||
continue
|
||||
# this outpoint has already been spent, by spending_tx
|
||||
assert spending_tx_hash in self.transactions
|
||||
conflicting_txns |= {spending_tx_hash}
|
||||
if tx_hash in conflicting_txns:
|
||||
# this tx is already in history, so it conflicts with itself
|
||||
if len(conflicting_txns) > 1:
|
||||
raise Exception('Found conflicting transactions already in wallet history.')
|
||||
conflicting_txns -= {tx_hash}
|
||||
return conflicting_txns
|
||||
|
||||
def add_transaction(self, tx_hash, tx, allow_unrelated=False):
|
||||
assert tx_hash, tx_hash
|
||||
assert tx, tx
|
||||
assert tx.is_complete()
|
||||
# assert tx_hash == tx.txid() # disabled as expensive; test done by Synchronizer.
|
||||
# we need self.transaction_lock but get_tx_height will take self.lock
|
||||
# so we need to take that too here, to enforce order of locks
|
||||
with self.lock, self.transaction_lock:
|
||||
# NOTE: returning if tx in self.transactions might seem like a good idea
|
||||
# BUT we track is_mine inputs in a txn, and during subsequent calls
|
||||
# of add_transaction tx, we might learn of more-and-more inputs of
|
||||
# being is_mine, as we roll the gap_limit forward
|
||||
is_coinbase = tx.inputs()[0]['type'] == 'coinbase'
|
||||
tx_height = self.get_tx_height(tx_hash).height
|
||||
if not allow_unrelated:
|
||||
# note that during sync, if the transactions are not properly sorted,
|
||||
# it could happen that we think tx is unrelated but actually one of the inputs is is_mine.
|
||||
# this is the main motivation for allow_unrelated
|
||||
is_mine = any([self.is_mine(self.get_txin_address(txin)) for txin in tx.inputs()])
|
||||
is_for_me = any([self.is_mine(self.get_txout_address(txo)) for txo in tx.outputs()])
|
||||
if not is_mine and not is_for_me:
|
||||
raise UnrelatedTransactionException()
|
||||
# Find all conflicting transactions.
|
||||
# In case of a conflict,
|
||||
# 1. confirmed > mempool > local
|
||||
# 2. this new txn has priority over existing ones
|
||||
# When this method exits, there must NOT be any conflict, so
|
||||
# either keep this txn and remove all conflicting (along with dependencies)
|
||||
# or drop this txn
|
||||
conflicting_txns = self.get_conflicting_transactions(tx_hash, tx)
|
||||
if conflicting_txns:
|
||||
existing_mempool_txn = any(
|
||||
self.get_tx_height(tx_hash2).height in (TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_UNCONF_PARENT)
|
||||
for tx_hash2 in conflicting_txns)
|
||||
existing_confirmed_txn = any(
|
||||
self.get_tx_height(tx_hash2).height > 0
|
||||
for tx_hash2 in conflicting_txns)
|
||||
if existing_confirmed_txn and tx_height <= 0:
|
||||
# this is a non-confirmed tx that conflicts with confirmed txns; drop.
|
||||
return False
|
||||
if existing_mempool_txn and tx_height == TX_HEIGHT_LOCAL:
|
||||
# this is a local tx that conflicts with non-local txns; drop.
|
||||
return False
|
||||
# keep this txn and remove all conflicting
|
||||
to_remove = set()
|
||||
to_remove |= conflicting_txns
|
||||
for conflicting_tx_hash in conflicting_txns:
|
||||
to_remove |= self.get_depending_transactions(conflicting_tx_hash)
|
||||
for tx_hash2 in to_remove:
|
||||
self.remove_transaction(tx_hash2)
|
||||
# add inputs
|
||||
def add_value_from_prev_output():
|
||||
dd = self.txo.get(prevout_hash, {})
|
||||
# note: this nested loop takes linear time in num is_mine outputs of prev_tx
|
||||
for addr, outputs in dd.items():
|
||||
# note: instead of [(n, v, is_cb), ...]; we could store: {n -> (v, is_cb)}
|
||||
for n, v, is_cb in outputs:
|
||||
if n == prevout_n:
|
||||
if addr and self.is_mine(addr):
|
||||
if d.get(addr) is None:
|
||||
d[addr] = set()
|
||||
d[addr].add((ser, v))
|
||||
return
|
||||
self.txi[tx_hash] = d = {}
|
||||
for txi in tx.inputs():
|
||||
if txi['type'] == 'coinbase':
|
||||
continue
|
||||
prevout_hash = txi['prevout_hash']
|
||||
prevout_n = txi['prevout_n']
|
||||
ser = prevout_hash + ':%d' % prevout_n
|
||||
self.spent_outpoints[prevout_hash][prevout_n] = tx_hash
|
||||
add_value_from_prev_output()
|
||||
# add outputs
|
||||
self.txo[tx_hash] = d = {}
|
||||
for n, txo in enumerate(tx.outputs()):
|
||||
v = txo[2]
|
||||
ser = tx_hash + ':%d'%n
|
||||
addr = self.get_txout_address(txo)
|
||||
if addr and self.is_mine(addr):
|
||||
if d.get(addr) is None:
|
||||
d[addr] = []
|
||||
d[addr].append((n, v, is_coinbase))
|
||||
# give v to txi that spends me
|
||||
next_tx = self.spent_outpoints[tx_hash].get(n)
|
||||
if next_tx is not None:
|
||||
dd = self.txi.get(next_tx, {})
|
||||
if dd.get(addr) is None:
|
||||
dd[addr] = set()
|
||||
if (ser, v) not in dd[addr]:
|
||||
dd[addr].add((ser, v))
|
||||
self._add_tx_to_local_history(next_tx)
|
||||
# add to local history
|
||||
self._add_tx_to_local_history(tx_hash)
|
||||
# save
|
||||
self.transactions[tx_hash] = tx
|
||||
return True
|
||||
|
||||
def remove_transaction(self, tx_hash):
|
||||
def remove_from_spent_outpoints():
|
||||
# undo spends in spent_outpoints
|
||||
if tx is not None: # if we have the tx, this branch is faster
|
||||
for txin in tx.inputs():
|
||||
if txin['type'] == 'coinbase':
|
||||
continue
|
||||
prevout_hash = txin['prevout_hash']
|
||||
prevout_n = txin['prevout_n']
|
||||
self.spent_outpoints[prevout_hash].pop(prevout_n, None)
|
||||
if not self.spent_outpoints[prevout_hash]:
|
||||
self.spent_outpoints.pop(prevout_hash)
|
||||
else: # expensive but always works
|
||||
for prevout_hash, d in list(self.spent_outpoints.items()):
|
||||
for prevout_n, spending_txid in d.items():
|
||||
if spending_txid == tx_hash:
|
||||
self.spent_outpoints[prevout_hash].pop(prevout_n, None)
|
||||
if not self.spent_outpoints[prevout_hash]:
|
||||
self.spent_outpoints.pop(prevout_hash)
|
||||
# Remove this tx itself; if nothing spends from it.
|
||||
# It is not so clear what to do if other txns spend from it, but it will be
|
||||
# removed when those other txns are removed.
|
||||
if not self.spent_outpoints[tx_hash]:
|
||||
self.spent_outpoints.pop(tx_hash)
|
||||
|
||||
with self.transaction_lock:
|
||||
self.print_error("removing tx from history", tx_hash)
|
||||
tx = self.transactions.pop(tx_hash, None)
|
||||
remove_from_spent_outpoints()
|
||||
self._remove_tx_from_local_history(tx_hash)
|
||||
self.txi.pop(tx_hash, None)
|
||||
self.txo.pop(tx_hash, None)
|
||||
|
||||
def get_depending_transactions(self, tx_hash):
    """Returns all (grand-)children of tx_hash in this wallet."""
    # iterative DFS instead of recursion; result is the same set
    descendants = set()
    to_visit = [tx_hash]
    while to_visit:
        current = to_visit.pop()
        for spender in self.spent_outpoints[current].values():
            if spender not in descendants:
                descendants.add(spender)
                to_visit.append(spender)
    return descendants
|
||||
|
||||
def receive_tx_callback(self, tx_hash, tx, tx_height):
    """Network callback: a transaction touching one of our addresses arrived."""
    self.add_unverified_tx(tx_hash, tx_height)
    # allow_unrelated: trust the server that this tx concerns us
    self.add_transaction(tx_hash, tx, allow_unrelated=True)
|
||||
|
||||
def receive_history_callback(self, addr, hist, tx_fees):
    """Network callback: the server sent a new history for `addr`.

    hist is a list of (tx_hash, height); tx_fees maps txid -> fee.
    Txs that dropped out of the server history are demoted to "local".
    """
    with self.lock:
        old_hist = self.get_address_history(addr)
        for tx_hash, height in old_hist:
            if (tx_hash, height) not in hist:
                # make tx local
                self.unverified_tx.pop(tx_hash, None)
                self.verified_tx.pop(tx_hash, None)
                if self.verifier:
                    self.verifier.remove_spv_proof_for_tx(tx_hash)
        self.history[addr] = hist

    for tx_hash, tx_height in hist:
        # add it in case it was previously unconfirmed
        self.add_unverified_tx(tx_hash, tx_height)
        # if addr is new, we have to recompute txi and txo
        tx = self.transactions.get(tx_hash)
        if tx is None:
            continue
        self.add_transaction(tx_hash, tx, allow_unrelated=True)

    # Store fees
    self.tx_fees.update(tx_fees)
|
||||
|
||||
@profiler
def load_transactions(self):
    """Load txi/txo/tx_fees/transactions/spent_outpoints from storage,
    pruning transactions that no longer touch any of our addresses."""
    # bookkeeping data of is_mine inputs of transactions
    self.txi = self.storage.get('txi', {})  # txid -> address -> (prev_outpoint, value)
    # the JSON round-trip turned the stored sets of tuples into lists of
    # lists; restore sets of tuples so membership tests work again
    for txid, d in list(self.txi.items()):
        for addr, lst in d.items():
            self.txi[txid][addr] = set([tuple(x) for x in lst])
    # bookkeeping data of is_mine outputs of transactions
    self.txo = self.storage.get('txo', {})  # txid -> address -> (output_index, value, is_coinbase)
    self.tx_fees = self.storage.get('tx_fees', {})
    tx_list = self.storage.get('transactions', {})
    # load transactions
    self.transactions = {}
    for tx_hash, raw in tx_list.items():
        tx = Transaction(raw)
        self.transactions[tx_hash] = tx
        if self.txi.get(tx_hash) is None and self.txo.get(tx_hash) is None:
            # tx references none of our inputs/outputs any more
            self.print_error("removing unreferenced tx", tx_hash)
            self.transactions.pop(tx_hash)
    # load spent_outpoints
    _spent_outpoints = self.storage.get('spent_outpoints', {})
    self.spent_outpoints = defaultdict(dict)
    for prevout_hash, d in _spent_outpoints.items():
        for prevout_n_str, spending_txid in d.items():
            prevout_n = int(prevout_n_str)  # JSON object keys are strings
            if spending_txid not in self.transactions:
                continue  # only care about txns we have
            self.spent_outpoints[prevout_hash][prevout_n] = spending_txid
|
||||
|
||||
@profiler
def load_local_history(self):
    """Rebuild the in-memory address -> set(txid) index from txi/txo."""
    self._history_local = {}  # address -> set(txid)
    self._address_history_changed_events = defaultdict(asyncio.Event)  # address -> Event
    for txid in itertools.chain(self.txi, self.txo):
        self._add_tx_to_local_history(txid)
|
||||
|
||||
@profiler
def check_history(self):
    """Sanity-check the stored address history: drop addresses that are
    not ours, and re-run add_transaction for txs whose txi/txo
    bookkeeping is missing.  Persists only if something changed."""
    save = False
    hist_addrs_mine = list(filter(lambda k: self.is_mine(k), self.history.keys()))
    hist_addrs_not_mine = list(filter(lambda k: not self.is_mine(k), self.history.keys()))
    for addr in hist_addrs_not_mine:
        self.history.pop(addr)
        save = True
    for addr in hist_addrs_mine:
        hist = self.history[addr]
        for tx_hash, tx_height in hist:
            if self.txi.get(tx_hash) or self.txo.get(tx_hash):
                continue  # bookkeeping already present, nothing to repair
            tx = self.transactions.get(tx_hash)
            if tx is not None:
                self.add_transaction(tx_hash, tx, allow_unrelated=True)
                save = True
    if save:
        self.save_transactions()
|
||||
|
||||
def remove_local_transactions_we_dont_have(self):
    """Purge bookkeeping for txs considered 'local' whose raw tx is missing."""
    txid_set = set(self.txi) | set(self.txo)
    for txid in txid_set:
        tx_height = self.get_tx_height(txid).height
        if tx_height == TX_HEIGHT_LOCAL and txid not in self.transactions:
            self.remove_transaction(txid)
|
||||
|
||||
@profiler
def save_transactions(self, write=False):
    """Persist all tx bookkeeping to storage; flush to disk when `write`."""
    with self.transaction_lock:
        raw_txs = {txid: str(tx) for txid, tx in self.transactions.items()}
        self.storage.put('transactions', raw_txs)
        self.storage.put('txi', self.txi)
        self.storage.put('txo', self.txo)
        self.storage.put('tx_fees', self.tx_fees)
        self.storage.put('addr_history', self.history)
        self.storage.put('spent_outpoints', self.spent_outpoints)
        if write:
            self.storage.write()
|
||||
|
||||
def save_verified_tx(self, write=False):
    """Persist the verified-tx map (incl. flodata) under 'verified_tx3'."""
    with self.lock:
        verified_tx_to_save = {}
        for txid, tx_info in self.verified_tx.items():
            # serialize the TxMinedInfo fields as a plain tuple for storage
            verified_tx_to_save[txid] = (tx_info.height, tx_info.timestamp,
                                         tx_info.txpos, tx_info.header_hash, tx_info.flodata)
        self.storage.put('verified_tx3', verified_tx_to_save)
        if write:
            self.storage.write()
|
||||
|
||||
def clear_history(self):
    """Wipe all tx bookkeeping (keeps addresses) and persist the empty state."""
    with self.lock:
        with self.transaction_lock:
            self.txi = {}
            self.txo = {}
            self.tx_fees = {}
            self.spent_outpoints = defaultdict(dict)
            self.history = {}
            self.verified_tx = {}
            self.transactions = {}  # type: Dict[str, Transaction]
            self.save_transactions()
|
||||
|
||||
def get_txpos(self, tx_hash):
    """Returns (height, txpos) tuple, even if the tx is unverified.

    Unverified and unknown txs get synthetic sort keys past 1e9 so they
    order after all verified txs.
    """
    with self.lock:
        if tx_hash in self.verified_tx:
            info = self.verified_tx[tx_hash]
            return info.height, info.txpos
        if tx_hash in self.unverified_tx:
            height = self.unverified_tx[tx_hash]
            if height > 0:
                return (height, 0)
            return ((1e9 - height), 0)
        return (1e9 + 1, 0)
|
||||
|
||||
def with_local_height_cached(func):
    """Decorator caching get_local_height() in thread-local storage for the
    duration of the decorated call."""
    # get local height only once, as it's relatively expensive.
    # take care that nested calls work as expected
    def f(self, *args, **kwargs):
        orig_val = getattr(self.threadlocal_cache, 'local_height', None)
        # reuse an outer call's cached value; otherwise fetch it now
        self.threadlocal_cache.local_height = orig_val or self.get_local_height()
        try:
            return func(self, *args, **kwargs)
        finally:
            # restore, so the cache lives only for the outermost call
            self.threadlocal_cache.local_height = orig_val
    return f
|
||||
|
||||
@with_local_height_cached
def get_history(self, domain=None):
    """Wallet history over `domain` addresses (default: all with history),
    as a list of (tx_hash, tx_mined_status, delta, balance), oldest first."""
    # get domain
    if domain is None:
        domain = self.history.keys()
    domain = set(domain)
    # 1. Get the history of each address in the domain, maintain the
    #    delta of a tx as the sum of its deltas on domain addresses
    tx_deltas = defaultdict(int)
    for addr in domain:
        h = self.get_address_history(addr)
        for tx_hash, height in h:
            delta = self.get_tx_delta(tx_hash, addr)
            # None poisons the sum: the delta for this tx is unknown
            if delta is None or tx_deltas[tx_hash] is None:
                tx_deltas[tx_hash] = None
            else:
                tx_deltas[tx_hash] += delta
    # 2. create sorted history
    history = []
    for tx_hash in tx_deltas:
        delta = tx_deltas[tx_hash]
        tx_mined_status = self.get_tx_height(tx_hash)
        history.append((tx_hash, tx_mined_status, delta))
    history.sort(key = lambda x: self.get_txpos(x[0]), reverse=True)
    # 3. add balance (walk newest -> oldest, subtracting each delta)
    c, u, x = self.get_balance(domain)
    balance = c + u + x
    h2 = []
    for tx_hash, tx_mined_status, delta in history:
        h2.append((tx_hash, tx_mined_status, delta, balance))
        if balance is None or delta is None:
            balance = None
        else:
            balance -= delta
    h2.reverse()
    # fixme: this may happen if history is incomplete
    if balance not in [None, 0]:
        self.print_error("Error: history not synchronized")
        return []

    return h2
|
||||
|
||||
def _add_tx_to_local_history(self, txid):
    """Record txid in the local address -> set(txid) history index and
    notify waiters on every affected address."""
    with self.transaction_lock:
        for addr in itertools.chain(self.txi.get(txid, []), self.txo.get(txid, [])):
            cur_hist = self._history_local.get(addr, set())
            cur_hist.add(txid)
            self._history_local[addr] = cur_hist
            self._mark_address_history_changed(addr)
|
||||
|
||||
def _remove_tx_from_local_history(self, txid):
    """Remove txid from the local history index of every address it touches."""
    with self.transaction_lock:
        for addr in itertools.chain(self.txi.get(txid, []), self.txo.get(txid, [])):
            cur_hist = self._history_local.get(addr, set())
            try:
                cur_hist.remove(txid)
            except KeyError:
                # txid was never indexed for this address; nothing to do
                pass
            else:
                self._history_local[addr] = cur_hist
|
||||
|
||||
def _mark_address_history_changed(self, addr: str) -> None:
    # history for this address changed, wake up coroutines:
    self._address_history_changed_events[addr].set()
    # clear event immediately so that coroutines can wait() for the next change:
    self._address_history_changed_events[addr].clear()
|
||||
|
||||
async def wait_for_address_history_to_change(self, addr: str) -> None:
    """Wait until the server tells us about a new transaction related to addr.

    Unconfirmed and confirmed transactions are not distinguished, and so e.g. SPV
    is not taken into account.
    """
    assert self.is_mine(addr), "address needs to be is_mine to be watched"
    await self._address_history_changed_events[addr].wait()
|
||||
|
||||
def add_unverified_tx(self, tx_hash, tx_height):
    """Record tx_hash at tx_height pending SPV verification; a previously
    verified tx that became unconfirmed again (reorg) is demoted."""
    if tx_hash in self.verified_tx:
        if tx_height in (TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_UNCONF_PARENT):
            # tx dropped out of a block: forget its verification + SPV proof
            with self.lock:
                self.verified_tx.pop(tx_hash)
            if self.verifier:
                self.verifier.remove_spv_proof_for_tx(tx_hash)
    else:
        with self.lock:
            # tx will be verified only if height > 0
            self.unverified_tx[tx_hash] = tx_height
|
||||
|
||||
def remove_unverified_tx(self, tx_hash, tx_height):
    """Drop tx_hash from the unverified map, but only if it is still
    recorded at tx_height (guards against racing height updates)."""
    with self.lock:
        if self.unverified_tx.get(tx_hash) == tx_height:
            self.unverified_tx.pop(tx_hash, None)
|
||||
|
||||
def add_verified_tx(self, tx_hash: str, info: TxMinedInfo):
    # Remove from the unverified map and add to the verified map
    with self.lock:
        self.unverified_tx.pop(tx_hash, None)
        self.verified_tx[tx_hash] = info
    # notify listeners outside the lock
    tx_mined_status = self.get_tx_height(tx_hash)
    self.network.trigger_callback('verified', self, tx_hash, tx_mined_status)
|
||||
|
||||
def get_unverified_txs(self):
    '''Returns a map from tx hash to transaction height'''
    with self.lock:
        return dict(self.unverified_tx)  # copy, so callers may mutate freely
|
||||
|
||||
def undo_verifications(self, blockchain, height):
    '''Used by the verifier when a reorg has happened'''
    # Returns the set of tx hashes whose verification was undone.
    txs = set()
    with self.lock:
        for tx_hash, info in list(self.verified_tx.items()):
            tx_height = info.height
            if tx_height >= height:
                header = blockchain.read_header(tx_height)
                # header missing or hash mismatch -> proof no longer valid
                if not header or hash_header(header) != info.header_hash:
                    self.verified_tx.pop(tx_hash, None)
                    # NOTE: we should add these txns to self.unverified_tx,
                    # but with what height?
                    # If on the new fork after the reorg, the txn is at the
                    # same height, we will not get a status update for the
                    # address. If the txn is not mined or at a diff height,
                    # we should get a status update. Unless we put tx into
                    # unverified_tx, it will turn into local. So we put it
                    # into unverified_tx with the old height, and if we get
                    # a status update, that will overwrite it.
                    self.unverified_tx[tx_hash] = tx_height
                    txs.add(tx_hash)
    return txs
|
||||
|
||||
def get_local_height(self):
    """ return last known height if we are offline """
    # prefer the thread-local cache set by with_local_height_cached
    height = getattr(self.threadlocal_cache, 'local_height', None)
    if height is not None:
        return height
    if self.network:
        return self.network.get_local_height()
    return self.storage.get('stored_height', 0)
|
||||
|
||||
def get_tx_height(self, tx_hash: str) -> TxMinedInfo:
    """Mined status of tx_hash: verified info (with live conf count),
    unverified height with conf=0, or TX_HEIGHT_LOCAL if unknown."""
    with self.lock:
        if tx_hash in self.verified_tx:
            info = self.verified_tx[tx_hash]
            conf = max(self.get_local_height() - info.height + 1, 0)
            return info._replace(conf=conf)
        elif tx_hash in self.unverified_tx:
            height = self.unverified_tx[tx_hash]
            return TxMinedInfo(height=height, conf=0)
        else:
            # local transaction
            return TxMinedInfo(height=TX_HEIGHT_LOCAL, conf=0)
|
||||
|
||||
def get_flodata(self, tx_hash: str):
    """ Given a transaction, returns flodata """
    with self.lock:
        if tx_hash in self.verified_tx:
            info = self.verified_tx[tx_hash]
            # NOTE(review): indexes field 5 of the verified-tx info tuple —
            # presumably flodata (cf. save_verified_tx's field order);
            # confirm this matches TxMinedInfo's field layout.
            flodata = info[5]
            return flodata
        elif tx_hash in self.unverified_tx:
            tx = self.transactions.get(tx_hash)
            # NOTE(review): tx may be None if we lack the raw tx — that would
            # raise AttributeError here; confirm callers guarantee presence.
            flodata = tx.flodata[5:]  # strips a 5-char prefix — TODO confirm encoding
            return flodata
        else:
            # local transaction
            tx = self.transactions.get(tx_hash)
            flodata = tx.flodata[5:]
            return flodata
|
||||
|
||||
def set_up_to_date(self, up_to_date):
    """Flip the synchronizer's up-to-date flag; persist state on True."""
    with self.lock:
        self.up_to_date = up_to_date
    if self.network:
        self.network.notify('status')
    if up_to_date:
        self.save_transactions(write=True)
        # if the verifier is also up to date, persist that too;
        # otherwise it will persist its results when it finishes
        if self.verifier and self.verifier.is_up_to_date():
            self.save_verified_tx(write=True)
|
||||
|
||||
def is_up_to_date(self):
    """Whether we have processed all server notifications so far."""
    with self.lock:
        return self.up_to_date
|
||||
|
||||
@with_transaction_lock
def get_tx_delta(self, tx_hash, address):
    """effect of tx on address"""
    # value spent from `address` by this tx's inputs
    spent = sum(v for _, v in self.txi.get(tx_hash, {}).get(address, []))
    # value received at `address` by this tx's outputs
    received = sum(v for _, v, _ in self.txo.get(tx_hash, {}).get(address, []))
    return received - spent
|
||||
|
||||
@with_transaction_lock
def get_tx_value(self, txid):
    """effect of tx on the entire domain"""
    total = 0
    for entries in self.txi.get(txid, {}).values():
        total -= sum(v for _, v in entries)
    for entries in self.txo.get(txid, {}).values():
        total += sum(v for _, v, _ in entries)
    return total
|
||||
|
||||
def get_wallet_delta(self, tx: Transaction):
    """ effect of tx on wallet

    Returns (is_relevant, is_mine, v, fee):
      is_relevant - tx touches a wallet address,
      is_mine     - at least one input is ours,
      v           - net value change for the wallet,
      fee         - tx fee, or None when it cannot be determined.
    """
    is_relevant = False  # "related to wallet?"
    is_mine = False
    is_pruned = False    # an input of ours whose funding output we lack
    is_partial = False   # some but not all inputs are ours
    v_in = v_out = v_out_mine = 0
    for txin in tx.inputs():
        addr = self.get_txin_address(txin)
        if self.is_mine(addr):
            is_mine = True
            is_relevant = True
            d = self.txo.get(txin['prevout_hash'], {}).get(addr, [])
            for n, v, cb in d:
                if n == txin['prevout_n']:
                    value = v
                    break
            else:
                value = None
            if value is None:
                is_pruned = True
            else:
                v_in += value
        else:
            is_partial = True
    if not is_mine:
        is_partial = False
    for o in tx.outputs():
        v_out += o.value
        if self.is_mine(o.address):
            v_out_mine += o.value
            is_relevant = True
    if is_pruned:
        # some inputs are mine:
        fee = None
        if is_mine:
            v = v_out_mine - v_out
        else:
            # no input is mine
            v = v_out_mine
    else:
        v = v_out_mine - v_in
        if is_partial:
            # some inputs are mine, but not all
            fee = None
        else:
            # all inputs are mine
            fee = v_in - v_out
    if not is_mine:
        fee = None
    return is_relevant, is_mine, v, fee
|
||||
|
||||
def get_tx_fee(self, tx: Transaction) -> Optional[int]:
    """Fee of `tx`, from the wallet delta or the server-reported tx_fees;
    None when unknown.  Caches a known fee on the tx object itself."""
    if not tx:
        return None
    if hasattr(tx, '_cached_fee'):
        return tx._cached_fee
    with self.lock, self.transaction_lock:
        is_relevant, is_mine, v, fee = self.get_wallet_delta(tx)
        if fee is None:
            txid = tx.txid()
            fee = self.tx_fees.get(txid)
        # only cache non-None, as None can still change while syncing
        if fee is not None:
            tx._cached_fee = fee
    return fee
|
||||
|
||||
def get_addr_io(self, address):
    """Return (received, sent) for `address`:
    received maps "txid:n" -> (height, value, is_coinbase) for outputs
    paying the address; sent maps a spent outpoint string -> spend height."""
    with self.lock, self.transaction_lock:
        h = self.get_address_history(address)
        received = {}
        sent = {}
        for tx_hash, height in h:
            l = self.txo.get(tx_hash, {}).get(address, [])
            for n, v, is_cb in l:
                received[tx_hash + ':%d'%n] = (height, v, is_cb)
        for tx_hash, height in h:
            l = self.txi.get(tx_hash, {}).get(address, [])
            for txi, v in l:
                sent[txi] = height
        return received, sent
|
||||
|
||||
def get_addr_utxo(self, address):
    """Unspent outputs of `address` as a dict: "txid:n" -> coin dict."""
    coins, spent = self.get_addr_io(address)
    for txi in spent:
        coins.pop(txi)
    out = {}
    for txo, v in coins.items():
        tx_height, value, is_cb = v
        prevout_hash, prevout_n = txo.split(':')
        x = {
            'address':address,
            'value':value,
            'prevout_n':int(prevout_n),
            'prevout_hash':prevout_hash,
            'height':tx_height,
            'coinbase':is_cb
        }
        out[txo] = x
    return out
|
||||
|
||||
def get_addr_received(self, address):
    """Return the total amount ever received by `address`."""
    received, _sent = self.get_addr_io(address)
    return sum(v for _height, v, _is_cb in received.values())
|
||||
|
||||
@with_local_height_cached
def get_addr_balance(self, address):
    """Return the balance of a FLO address:
    confirmed and matured, unconfirmed, unmatured
    """
    received, sent = self.get_addr_io(address)
    c = u = x = 0
    local_height = self.get_local_height()
    for txo, (tx_height, v, is_cb) in received.items():
        if is_cb and tx_height + COINBASE_MATURITY > local_height:
            # immature coinbase output
            x += v
        elif tx_height > 0:
            c += v
        else:
            # unconfirmed / local
            u += v
        if txo in sent:
            # spent: subtract from the bucket matching the spend's height
            if sent[txo] > 0:
                c -= v
            else:
                u -= v
    return c, u, x
|
||||
|
||||
@with_local_height_cached
def get_utxos(self, domain=None, excluded=None, mature=False, confirmed_only=False, nonlocal_only=False):
    """Spendable coins for `domain` addresses (default: all).

    excluded       - set of addresses to skip,
    mature         - drop immature coinbase outputs,
    confirmed_only - drop unconfirmed/local coins,
    nonlocal_only  - drop coins from local (unbroadcast) txs.
    """
    coins = []
    if domain is None:
        domain = self.get_addresses()
    domain = set(domain)
    if excluded:
        # (cleanup: domain is already a set; no need to re-wrap it)
        domain = domain - excluded
    for addr in domain:
        utxos = self.get_addr_utxo(addr)
        for x in utxos.values():
            if confirmed_only and x['height'] <= 0:
                continue
            if nonlocal_only and x['height'] == TX_HEIGHT_LOCAL:
                continue
            if mature and x['coinbase'] and x['height'] + COINBASE_MATURITY > self.get_local_height():
                continue
            coins.append(x)
            # (cleanup: removed a redundant trailing `continue` here)
    return coins
|
||||
|
||||
def get_balance(self, domain=None):
    """Sum (confirmed, unconfirmed, unmatured) over `domain` addresses
    (default: every wallet address)."""
    if domain is None:
        domain = self.get_addresses()
    total_c = total_u = total_x = 0
    for address in set(domain):
        c, u, x = self.get_addr_balance(address)
        total_c += c
        total_u += u
        total_x += x
    return total_c, total_u, total_x
|
||||
|
||||
def is_used(self, address):
    """True iff `address` has any server-reported history."""
    return bool(self.history.get(address, []))
|
||||
|
||||
def is_empty(self, address):
    """True iff `address` holds no balance (confirmed + unconfirmed + unmatured)."""
    return sum(self.get_addr_balance(address)) == 0
|
||||
|
||||
def synchronize(self):
    # no-op hook: deterministic-wallet subclasses generate addresses here
    pass
|
||||
@ -1,269 +0,0 @@
|
||||
# Copyright (C) 2018 The Electrum developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file LICENCE or http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import hashlib
from typing import Iterator, List

from . import constants
from . import ecc
from .bitcoin import rev_hex, int_to_hex, EncodeBase58Check, DecodeBase58Check
from .crypto import hash_160, hmac_oneshot
from .util import bfh, bh2u, BitcoinException, print_error
|
||||
|
||||
|
||||
BIP32_PRIME = 0x80000000  # bit flag marking a hardened derivation index
|
||||
|
||||
|
||||
def protect_against_invalid_ecpoint(func):
    """Decorator for CKD_* functions: when derivation at index n yields an
    invalid EC point, retry with n+1, staying within the same
    hardened/non-hardened range."""
    def func_wrapper(*args):
        n = args[-1]
        while True:
            is_prime = n & BIP32_PRIME
            try:
                return func(*args[:-1], n=n)
            except ecc.InvalidECPointException:
                print_error('bip32 protect_against_invalid_ecpoint: skipping index')
                n += 1
                is_prime2 = n & BIP32_PRIME
                # never wrap from non-hardened into hardened range (or overflow)
                if is_prime != is_prime2: raise OverflowError()
    return func_wrapper
|
||||
|
||||
|
||||
# Child private key derivation function (from master private key)
|
||||
# k = master private key (32 bytes)
|
||||
# c = master chain code (extra entropy for key derivation) (32 bytes)
|
||||
# n = the index of the key we want to derive. (only 32 bits will be used)
|
||||
# If n is hardened (i.e. the 32nd bit is set), the resulting private key's
|
||||
# corresponding public key can NOT be determined without the master private key.
|
||||
# However, if n is not hardened, the resulting private key's corresponding
|
||||
# public key can be determined without the master private key.
|
||||
# Child private key derivation function (from master private key)
# k = master private key (32 bytes)
# c = master chain code (extra entropy for key derivation) (32 bytes)
# n = the index of the key we want to derive. (only 32 bits will be used)
# If n is hardened (i.e. the 32nd bit is set), the resulting private key's
# corresponding public key can NOT be determined without the master private key.
# However, if n is not hardened, the resulting private key's corresponding
# public key can be determined without the master private key.
@protect_against_invalid_ecpoint
def CKD_priv(k, c, n):
    if n < 0: raise ValueError('the bip32 index needs to be non-negative')
    is_prime = n & BIP32_PRIME
    # serialize n as 4 bytes big-endian (rev_hex of little-endian hex)
    return _CKD_priv(k, c, bfh(rev_hex(int_to_hex(n,4))), is_prime)
|
||||
|
||||
|
||||
def _CKD_priv(k, c, s, is_prime):
    """Core BIP32 private child derivation.  Raises InvalidECPointException
    when the derived key is out of range, so the caller can retry n+1."""
    try:
        keypair = ecc.ECPrivkey(k)
    except ecc.InvalidECPointException as e:
        raise BitcoinException('Impossible xprv (not within curve order)') from e
    cK = keypair.get_public_key_bytes(compressed=True)
    # hardened: HMAC over 0x00 || k || index; normal: over serP(K) || index
    data = bytes([0]) + k + s if is_prime else cK + s
    I = hmac_oneshot(c, data, hashlib.sha512)
    I_left = ecc.string_to_number(I[0:32])
    k_n = (I_left + ecc.string_to_number(k)) % ecc.CURVE_ORDER
    if I_left >= ecc.CURVE_ORDER or k_n == 0:
        raise ecc.InvalidECPointException()
    k_n = ecc.number_to_string(k_n, ecc.CURVE_ORDER)
    c_n = I[32:]
    return k_n, c_n
|
||||
|
||||
# Child public key derivation function (from public key only)
|
||||
# K = master public key
|
||||
# c = master chain code
|
||||
# n = index of key we want to derive
|
||||
# This function allows us to find the nth public key, as long as n is
|
||||
# not hardened. If n is hardened, we need the master private key to find it.
|
||||
# Child public key derivation function (from public key only)
# K = master public key
# c = master chain code
# n = index of key we want to derive
# This function allows us to find the nth public key, as long as n is
# not hardened. If n is hardened, we need the master private key to find it.
@protect_against_invalid_ecpoint
def CKD_pub(cK, c, n):
    if n < 0: raise ValueError('the bip32 index needs to be non-negative')
    if n & BIP32_PRIME: raise Exception()  # hardened: impossible from pubkey
    return _CKD_pub(cK, c, bfh(rev_hex(int_to_hex(n,4))))
|
||||
|
||||
# helper function, callable with arbitrary string.
|
||||
# note: 's' does not need to fit into 32 bits here! (c.f. trustedcoin billing)
|
||||
# helper function, callable with arbitrary string.
# note: 's' does not need to fit into 32 bits here! (c.f. trustedcoin billing)
def _CKD_pub(cK, c, s):
    """Core BIP32 public child derivation; raises InvalidECPointException
    if the child point is the point at infinity."""
    I = hmac_oneshot(c, cK + s, hashlib.sha512)
    pubkey = ecc.ECPrivkey(I[0:32]) + ecc.ECPubkey(cK)
    if pubkey.is_at_infinity():
        raise ecc.InvalidECPointException()
    cK_n = pubkey.get_public_key_bytes(compressed=True)
    c_n = I[32:]
    return cK_n, c_n
|
||||
|
||||
|
||||
def xprv_header(xtype, *, net=None):
    """4-byte xprv version header for key type `xtype` on `net`."""
    if net is None:
        net = constants.net
    return bfh(format(net.XPRV_HEADERS[xtype], '08x'))
|
||||
|
||||
|
||||
def xpub_header(xtype, *, net=None):
    """4-byte xpub version header for key type `xtype` on `net`."""
    if net is None:
        net = constants.net
    return bfh(format(net.XPUB_HEADERS[xtype], '08x'))
|
||||
|
||||
|
||||
def serialize_xprv(xtype, c, k, depth=0, fingerprint=b'\x00'*4,
                   child_number=b'\x00'*4, *, net=None):
    """Base58Check-encode an extended private key (BIP32 layout)."""
    if not ecc.is_secret_within_curve_range(k):
        raise BitcoinException('Impossible xprv (not within curve order)')
    # header || depth || parent fingerprint || child number || chaincode || 0x00 || k
    xprv = xprv_header(xtype, net=net) \
           + bytes([depth]) + fingerprint + child_number + c + bytes([0]) + k
    return EncodeBase58Check(xprv)
|
||||
|
||||
|
||||
def serialize_xpub(xtype, c, cK, depth=0, fingerprint=b'\x00'*4,
                   child_number=b'\x00'*4, *, net=None):
    """Base58Check-encode an extended public key (BIP32 layout)."""
    # header || depth || parent fingerprint || child number || chaincode || serP(K)
    xpub = xpub_header(xtype, net=net) \
           + bytes([depth]) + fingerprint + child_number + c + cK
    return EncodeBase58Check(xpub)
|
||||
|
||||
|
||||
class InvalidMasterKeyVersionBytes(BitcoinException):
    """Raised when an extended key's 4-byte version header is unknown for this network."""
    pass
|
||||
|
||||
|
||||
def deserialize_xkey(xkey, prv, *, net=None):
    """Decode a base58 extended key into
    (xtype, depth, fingerprint, child_number, chaincode, key).
    `prv` selects xprv vs xpub header tables."""
    if net is None:
        net = constants.net
    xkey = DecodeBase58Check(xkey)
    if len(xkey) != 78:
        raise BitcoinException('Invalid length for extended key: {}'
                               .format(len(xkey)))
    depth = xkey[4]
    fingerprint = xkey[5:9]
    child_number = xkey[9:13]
    c = xkey[13:13+32]
    header = int.from_bytes(xkey[0:4], byteorder='big')
    headers = net.XPRV_HEADERS if prv else net.XPUB_HEADERS
    if header not in headers.values():
        raise InvalidMasterKeyVersionBytes('Invalid extended key format: {}'
                                           .format(hex(header)))
    # reverse lookup: header value -> xtype name
    xtype = list(headers.keys())[list(headers.values()).index(header)]
    # private keys carry a 0x00 prefix byte before the 32-byte scalar
    n = 33 if prv else 32
    K_or_k = xkey[13+n:]
    if prv and not ecc.is_secret_within_curve_range(K_or_k):
        raise BitcoinException('Impossible xprv (not within curve order)')
    return xtype, depth, fingerprint, child_number, c, K_or_k
|
||||
|
||||
|
||||
def deserialize_xpub(xkey, *, net=None):
    """Decode a base58 xpub; see deserialize_xkey for the returned tuple."""
    return deserialize_xkey(xkey, False, net=net)
|
||||
|
||||
def deserialize_xprv(xkey, *, net=None):
    """Decode a base58 xprv; see deserialize_xkey for the returned tuple."""
    return deserialize_xkey(xkey, True, net=net)
|
||||
|
||||
def xpub_type(x):
    """Return the script-type tag (xtype) encoded in xpub `x`."""
    return deserialize_xpub(x)[0]
|
||||
|
||||
|
||||
def is_xpub(text):
    """Return True iff `text` parses as a valid xpub for the active network."""
    try:
        deserialize_xpub(text)
        return True
    except Exception:
        # narrowed from bare `except:` so SystemExit/KeyboardInterrupt propagate
        return False
|
||||
|
||||
|
||||
def is_xprv(text):
    """Return True iff `text` parses as a valid xprv for the active network."""
    try:
        deserialize_xprv(text)
        return True
    except Exception:
        # narrowed from bare `except:` so SystemExit/KeyboardInterrupt propagate
        return False
|
||||
|
||||
|
||||
def xpub_from_xprv(xprv):
    """Derive the matching xpub from an xprv (same depth/fingerprint/child)."""
    xtype, depth, fingerprint, child_number, c, k = deserialize_xprv(xprv)
    cK = ecc.ECPrivkey(k).get_public_key_bytes(compressed=True)
    return serialize_xpub(xtype, c, cK, depth, fingerprint, child_number)
|
||||
|
||||
|
||||
def bip32_root(seed, xtype):
    """Compute the BIP32 master (xprv, xpub) pair from `seed`."""
    I = hmac_oneshot(b"Bitcoin seed", seed, hashlib.sha512)
    master_k = I[0:32]
    master_c = I[32:]
    # create xprv first, as that will check if master_k is within curve order
    xprv = serialize_xprv(xtype, master_c, master_k)
    cK = ecc.ECPrivkey(master_k).get_public_key_bytes(compressed=True)
    xpub = serialize_xpub(xtype, master_c, cK)
    return xprv, xpub
|
||||
|
||||
|
||||
def xpub_from_pubkey(xtype, cK):
    """Wrap a bare compressed pubkey in an xpub with a zero chaincode."""
    # 0x02/0x03 is the compressed-pubkey prefix
    if cK[0] not in (0x02, 0x03):
        raise ValueError('Unexpected first byte: {}'.format(cK[0]))
    return serialize_xpub(xtype, b'\x00'*32, cK)
|
||||
|
||||
|
||||
def bip32_derivation(s: str) -> Iterator[int]:
    """Yield the child indices of a bip32 path such as "m/44'/0/1".

    Hardened components (trailing ') get BIP32_PRIME added.  Note this is
    a generator: the path is validated lazily, on first iteration.
    (fix: the return annotation previously claimed `int`.)
    """
    if not s.startswith('m/'):
        raise ValueError('invalid bip32 derivation path: {}'.format(s))
    s = s[2:]
    for n in s.split('/'):
        if n == '': continue
        i = int(n[:-1]) + BIP32_PRIME if n[-1] == "'" else int(n)
        yield i
|
||||
|
||||
def convert_bip32_path_to_list_of_uint32(n: str) -> List[int]:
    """Convert bip32 path to list of uint32 integers with prime flags
    m/0/-1/1' -> [0, 0x80000001, 0x80000001]

    based on code in trezorlib
    """
    indices = []
    for component in n.split('/')[1:]:
        if not component:
            continue
        hardened = 0
        if component.endswith("'"):
            component = component.replace('\'', '')
            hardened = BIP32_PRIME
        if component.startswith('-'):
            # negative index is an alternative hardened notation
            hardened = BIP32_PRIME
        indices.append(abs(int(component)) | hardened)
    return indices
|
||||
|
||||
def is_bip32_derivation(x: str) -> bool:
    """Return True iff `x` is a parseable bip32 path (e.g. "m/0/1'")."""
    try:
        # consume the generator: validation happens lazily during iteration
        # (also avoids building a throwaway list)
        for _ in bip32_derivation(x):
            pass
        return True
    except Exception:
        # narrowed from bare `except:` so SystemExit/KeyboardInterrupt propagate
        return False
|
||||
|
||||
def bip32_private_derivation(xprv, branch, sequence):
    """Derive the (xprv, xpub) pair at path `sequence`, which must extend
    `branch` (the path `xprv` itself sits at)."""
    if not sequence.startswith(branch):
        raise ValueError('incompatible branch ({}) and sequence ({})'
                         .format(branch, sequence))
    if branch == sequence:
        return xprv, xpub_from_xprv(xprv)
    xtype, depth, fingerprint, child_number, c, k = deserialize_xprv(xprv)
    sequence = sequence[len(branch):]
    for n in sequence.split('/'):
        if n == '': continue
        i = int(n[:-1]) + BIP32_PRIME if n[-1] == "'" else int(n)
        parent_k = k
        k, c = CKD_priv(k, c, i)
        depth += 1
    # NOTE(review): if the suffix contains no actual components (e.g.
    # sequence == branch + "/"), parent_k and i are unbound here and this
    # raises NameError — confirm callers never pass such a path.
    parent_cK = ecc.ECPrivkey(parent_k).get_public_key_bytes(compressed=True)
    fingerprint = hash_160(parent_cK)[0:4]
    child_number = bfh("%08X"%i)
    cK = ecc.ECPrivkey(k).get_public_key_bytes(compressed=True)
    xpub = serialize_xpub(xtype, c, cK, depth, fingerprint, child_number)
    xprv = serialize_xprv(xtype, c, k, depth, fingerprint, child_number)
    return xprv, xpub
|
||||
|
||||
|
||||
def bip32_public_derivation(xpub, branch, sequence):
    """Derive the xpub at path `sequence` (non-hardened components only),
    which must extend `branch`."""
    xtype, depth, fingerprint, child_number, c, cK = deserialize_xpub(xpub)
    if not sequence.startswith(branch):
        raise ValueError('incompatible branch ({}) and sequence ({})'
                         .format(branch, sequence))
    sequence = sequence[len(branch):]
    for n in sequence.split('/'):
        if n == '': continue
        i = int(n)  # hardened indices are impossible from a pubkey
        parent_cK = cK
        cK, c = CKD_pub(cK, c, i)
        depth += 1
    # NOTE(review): like bip32_private_derivation, an empty suffix leaves
    # parent_cK/i unbound -> NameError; confirm callers.
    fingerprint = hash_160(parent_cK)[0:4]
    child_number = bfh("%08X"%i)
    return serialize_xpub(xtype, c, cK, depth, fingerprint, child_number)
|
||||
|
||||
|
||||
def bip32_private_key(sequence, k, chain):
    """Fold CKD_priv over the index `sequence`, returning the leaf private key."""
    for i in sequence:
        k, chain = CKD_priv(k, chain, i)
    return k
|
||||
@ -1,732 +0,0 @@
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2012 thomasv@ecdsa.org
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
import os
|
||||
import threading
|
||||
from typing import Optional, Dict
|
||||
|
||||
from . import util
|
||||
from .bitcoin import hash_encode, int_to_hex, rev_hex
|
||||
from .crypto import sha256d
|
||||
from . import constants
|
||||
from .util import bfh, bh2u
|
||||
from .simple_config import SimpleConfig
|
||||
|
||||
# Optional scrypt backend for PoW header hashing (N=1024, r=1, p=1, 32-byte
# output); getPoWHash stays undefined if pylibscrypt is missing.
try:
    import pylibscrypt
    getPoWHash = lambda x: pylibscrypt.scrypt(password=x, salt=x, N=1024, r=1, p=1, olen=32)
except ImportError:
    util.print_msg("Warning: package pylibscrypt not available")
|
||||
|
||||
|
||||
HEADER_SIZE = 80  # bytes
# largest (easiest) target a block is allowed to have
MAX_TARGET = 0x00000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
|
||||
|
||||
|
||||
class MissingHeader(Exception):
    """Raised when a block header needed for a check is not stored locally."""
    pass
|
||||
|
||||
class InvalidHeader(Exception):
    """Raised when raw header bytes cannot be parsed."""
    pass
|
||||
|
||||
def serialize_header(header_dict: dict) -> str:
    """Serialize a block-header dict into its 80-byte hex representation."""
    fields = [
        int_to_hex(header_dict['version'], 4),
        rev_hex(header_dict['prev_block_hash']),
        rev_hex(header_dict['merkle_root']),
        int_to_hex(int(header_dict['timestamp']), 4),
        int_to_hex(int(header_dict['bits']), 4),
        int_to_hex(int(header_dict['nonce']), 4),
    ]
    return ''.join(fields)
|
||||
|
||||
def deserialize_header(s: bytes, height: int) -> dict:
    """Parse an 80-byte raw header into a dict, tagging it with *height*.

    Raises:
        InvalidHeader: if *s* is falsy or not exactly HEADER_SIZE bytes.
    """
    if not s:
        raise InvalidHeader('Invalid header: {}'.format(s))
    if len(s) != HEADER_SIZE:
        raise InvalidHeader('Invalid header length: {}'.format(len(s)))

    def to_int(chunk: bytes) -> int:
        return int.from_bytes(chunk, byteorder='little')

    return {
        'version': to_int(s[0:4]),
        'prev_block_hash': hash_encode(s[4:36]),
        'merkle_root': hash_encode(s[36:68]),
        'timestamp': to_int(s[68:72]),
        'bits': to_int(s[72:76]),
        'nonce': to_int(s[76:80]),
        'block_height': height,
    }
|
||||
|
||||
def hash_header(header: dict) -> str:
    """Return the hex-encoded block hash of *header*.

    None hashes to the all-zeroes string. A missing prev_block_hash is
    substituted with zeroes first (note: this mutates *header*).
    """
    if header is None:
        return '0' * 64
    if header.get('prev_block_hash') is None:
        header['prev_block_hash'] = '00'*32
    return hash_raw_header(serialize_header(header))
|
||||
|
||||
|
||||
def hash_raw_header(header: str) -> str:
    """Double-SHA256 a hex-serialized header and return the hash hex-encoded."""
    return hash_encode(sha256d(bfh(header)))
|
||||
|
||||
|
||||
# key: blockhash hex at forkpoint
|
||||
# the chain at some key is the best chain that includes the given hash
|
||||
blockchains = {} # type: Dict[str, Blockchain]
|
||||
blockchains_lock = threading.RLock()
|
||||
|
||||
|
||||
def read_blockchains(config: 'SimpleConfig'):
    """Load the main chain and all saved fork files into the global
    `blockchains` dict.

    Fork files that fail consistency checks (below max checkpoint, no
    parent, wrong first hash, cannot connect) are deleted from disk.
    """
    best_chain = Blockchain(config=config,
                            forkpoint=0,
                            parent=None,
                            forkpoint_hash=constants.net.GENESIS,
                            prev_hash=None)
    blockchains[constants.net.GENESIS] = best_chain
    # consistency checks
    if best_chain.height() > constants.net.max_checkpoint():
        header_after_cp = best_chain.read_header(constants.net.max_checkpoint()+1)
        if not header_after_cp or not best_chain.can_connect(header_after_cp, check_height=False):
            util.print_error("[blockchain] deleting best chain. cannot connect header after last cp to last cp.")
            os.unlink(best_chain.path())
            best_chain.update_size()
    # forks
    fdir = os.path.join(util.get_headers_dir(config), 'forks')
    util.make_dir(fdir)
    # files are named as: fork2_{forkpoint}_{prev_hash}_{first_hash}
    l = filter(lambda x: x.startswith('fork2_') and '.' not in x, os.listdir(fdir))
    l = sorted(l, key=lambda x: int(x.split('_')[1]))  # sort by forkpoint

    def delete_chain(filename, reason):
        # best-effort cleanup of an unusable fork file
        util.print_error(f"[blockchain] deleting chain (unknown): {reason}")
        os.unlink(os.path.join(fdir, filename))

    def instantiate_chain(filename):
        __, forkpoint, prev_hash, first_hash = filename.split('_')
        forkpoint = int(forkpoint)
        # hashes are stored in the filename with leading zeroes stripped
        prev_hash = (64-len(prev_hash)) * "0" + prev_hash  # left-pad with zeroes
        first_hash = (64-len(first_hash)) * "0" + first_hash
        # forks below the max checkpoint are not allowed
        if forkpoint <= constants.net.max_checkpoint():
            delete_chain(filename, "deleting fork below max checkpoint")
            return
        # find parent (sorting by forkpoint guarantees it's already instantiated)
        for parent in blockchains.values():
            if parent.check_hash(forkpoint - 1, prev_hash):
                break
        else:
            delete_chain(filename, "cannot find parent for chain")
            return
        b = Blockchain(config=config,
                       forkpoint=forkpoint,
                       parent=parent,
                       forkpoint_hash=first_hash,
                       prev_hash=prev_hash)
        # consistency checks
        h = b.read_header(b.forkpoint)
        if first_hash != hash_header(h):
            delete_chain(filename, "incorrect first hash for chain")
            return
        if not b.parent.can_connect(h, check_height=False):
            delete_chain(filename, "cannot connect chain to parent")
            return
        chain_id = b.get_id()
        assert first_hash == chain_id, (first_hash, chain_id)
        blockchains[chain_id] = b

    for filename in l:
        instantiate_chain(filename)
|
||||
|
||||
|
||||
def pow_hash_header(header):
    """Return the scrypt proof-of-work hash of *header*, hex-encoded.

    NOTE(review): getPoWHash is only bound when the module-level
    `import pylibscrypt` succeeded; otherwise this raises NameError.
    """
    return hash_encode(getPoWHash(bfh(serialize_header(header))))
|
||||
|
||||
|
||||
def get_best_chain() -> 'Blockchain':
    """Return the main chain (the chain rooted at the genesis block)."""
    return blockchains[constants.net.GENESIS]
|
||||
|
||||
# block hash -> chain work; up to and including that block
|
||||
_CHAINWORK_CACHE = {
|
||||
"0000000000000000000000000000000000000000000000000000000000000000": 0, # virtual block at height -1
|
||||
} # type: Dict[str, int]
|
||||
|
||||
|
||||
class Blockchain(util.PrintError):
|
||||
"""
|
||||
Manages blockchain headers and their verification
|
||||
"""
|
||||
|
||||
    def __init__(self, config: SimpleConfig, forkpoint: int, parent: Optional['Blockchain'],
                 forkpoint_hash: str, prev_hash: Optional[str]):
        """Create a chain whose first stored header is at height *forkpoint*.

        Args:
            config: application config, used to locate the headers directory.
            forkpoint: height of the first header stored by this chain.
            parent: chain this one forked off from; None for the main chain.
            forkpoint_hash: block hash at *forkpoint* ("first hash").
            prev_hash: block hash immediately before *forkpoint*; None at genesis.

        Raises:
            Exception: if *forkpoint* lies inside the checkpointed region.
        """
        assert isinstance(forkpoint_hash, str) and len(forkpoint_hash) == 64, forkpoint_hash
        assert (prev_hash is None) or (isinstance(prev_hash, str) and len(prev_hash) == 64), prev_hash
        # assert (parent is None) == (forkpoint == 0)
        if 0 < forkpoint <= constants.net.max_checkpoint():
            raise Exception(f"cannot fork below max checkpoint. forkpoint: {forkpoint}")
        self.config = config
        self.forkpoint = forkpoint  # height of first header
        self.parent = parent
        self._forkpoint_hash = forkpoint_hash  # blockhash at forkpoint. "first hash"
        self._prev_hash = prev_hash  # blockhash immediately before forkpoint
        self.lock = threading.RLock()
        self.update_size()
|
||||
|
||||
def with_lock(func):
|
||||
def func_wrapper(self, *args, **kwargs):
|
||||
with self.lock:
|
||||
return func(self, *args, **kwargs)
|
||||
return func_wrapper
|
||||
|
||||
    @property
    def checkpoints(self):
        """Hard-coded (hash, target) checkpoints for the active network."""
        return constants.net.CHECKPOINTS
|
||||
|
||||
def get_max_child(self) -> Optional[int]:
|
||||
with blockchains_lock: chains = list(blockchains.values())
|
||||
children = list(filter(lambda y: y.parent==self, chains))
|
||||
return max([x.forkpoint for x in children]) if children else None
|
||||
|
||||
def get_max_forkpoint(self) -> int:
|
||||
"""Returns the max height where there is a fork
|
||||
related to this chain.
|
||||
"""
|
||||
mc = self.get_max_child()
|
||||
return mc if mc is not None else self.forkpoint
|
||||
|
||||
    @with_lock
    def get_branch_size(self) -> int:
        """Number of headers between the highest fork point and this chain's tip."""
        return self.height() - self.get_max_forkpoint() + 1
|
||||
|
||||
    def get_name(self) -> str:
        """Short human-readable id: first 10 hex chars (leading zeroes
        stripped) of the block hash at the max forkpoint."""
        return self.get_hash(self.get_max_forkpoint()).lstrip('0')[0:10]
|
||||
|
||||
def check_header(self, header: dict) -> bool:
|
||||
header_hash = hash_header(header)
|
||||
height = header.get('block_height')
|
||||
return self.check_hash(height, header_hash)
|
||||
|
||||
    def check_hash(self, height: int, header_hash: str) -> bool:
        """Returns whether the hash of the block at given height
        is the given hash.
        """
        assert isinstance(header_hash, str) and len(header_hash) == 64, header_hash  # hex
        try:
            return header_hash == self.get_hash(height)
        except Exception:
            # e.g. MissingHeader: unknown height simply does not match
            return False
|
||||
|
||||
    def fork(parent, header: dict) -> 'Blockchain':
        """Create, persist and register a new fork of *parent* that starts
        with *header*.

        Note the unconventional signature: this is called on an existing
        chain instance, which acts as the parent -- there is no `self`
        parameter.

        Raises:
            Exception: if *header* does not connect to *parent*.
        """
        if not parent.can_connect(header, check_height=False):
            raise Exception("forking header does not connect to parent chain")
        forkpoint = header.get('block_height')
        self = Blockchain(config=parent.config,
                          forkpoint=forkpoint,
                          parent=parent,
                          forkpoint_hash=hash_header(header),
                          prev_hash=parent.get_hash(forkpoint-1))
        # create the new chain's (initially empty) header file on disk
        open(self.path(), 'w+').close()
        self.save_header(header)
        # put into global dict. note that in some cases
        # save_header might have already put it there but that's OK
        chain_id = self.get_id()
        with blockchains_lock:
            blockchains[chain_id] = self
        return self
|
||||
|
||||
    @with_lock
    def height(self) -> int:
        """Height of this chain's tip."""
        return self.forkpoint + self.size() - 1
|
||||
|
||||
    @with_lock
    def size(self) -> int:
        """Number of headers stored by this chain itself (excluding parents)."""
        return self._size
|
||||
|
||||
@with_lock
|
||||
def update_size(self) -> None:
|
||||
p = self.path()
|
||||
self._size = os.path.getsize(p)//HEADER_SIZE if os.path.exists(p) else 0
|
||||
|
||||
#def pow_hash_header(header):
|
||||
# return hash_encode(getPoWHash(bfh(serialize_header(header))))
|
||||
|
||||
@classmethod
|
||||
def verify_header(cls, header: dict, prev_hash: str, target: int, expected_header_hash: str=None) -> None:
|
||||
_hash = hash_header(header)
|
||||
_powhash = pow_hash_header(header)
|
||||
if expected_header_hash and expected_header_hash != _hash:
|
||||
raise Exception("hash mismatches with expected: {} vs {}".format(expected_header_hash, _hash))
|
||||
if prev_hash != header.get('prev_block_hash'):
|
||||
raise Exception("prev hash mismatch: %s vs %s" % (prev_hash, header.get('prev_block_hash')))
|
||||
if constants.net.TESTNET:
|
||||
return
|
||||
bits = cls.target_to_bits(target)
|
||||
bits = target
|
||||
if bits != header.get('bits'):
|
||||
raise Exception("bits mismatch: %s vs %s" % (bits, header.get('bits')))
|
||||
block_hash_as_num = int.from_bytes(bfh(_hash), byteorder='big')
|
||||
target_val = cls.bits_to_target(bits)
|
||||
if int('0x' + _powhash, 16) > target_val:
|
||||
raise Exception("insufficient proof of work: %s vs target %s" % (int('0x' + _hash, 16), target_val))
|
||||
|
||||
def verify_chunk(self, index: int, data: bytes) -> None:
|
||||
num = len(data) // HEADER_SIZE
|
||||
current_header = index * 2016
|
||||
print('chunk ' + str(index))
|
||||
prev_hash = self.get_hash(current_header - 1)
|
||||
headerLast = None
|
||||
headerFirst = None
|
||||
capture = None
|
||||
lst = []
|
||||
for i in range(num):
|
||||
averaging_interval = self.AveragingInterval(current_header)
|
||||
difficulty_interval = self.DifficultyAdjustmentInterval(current_header)
|
||||
if current_header < 426000:
|
||||
target = self.get_target(current_header - 1, headerLast, headerFirst)
|
||||
try:
|
||||
expected_header_hash = self.get_hash(current_header)
|
||||
except MissingHeader:
|
||||
expected_header_hash = None
|
||||
raw_header = data[i*HEADER_SIZE : (i+1)*HEADER_SIZE]
|
||||
header = deserialize_header(raw_header, current_header)
|
||||
self.verify_header(header, prev_hash, target, expected_header_hash)
|
||||
prev_hash = hash_header(header)
|
||||
headerLast = header
|
||||
if current_header == 0:
|
||||
headerFirst = header
|
||||
elif (current_header + averaging_interval + 1) % difficulty_interval == 0:
|
||||
capture = header
|
||||
if current_header != 0 and current_header % difficulty_interval == 0:
|
||||
headerFirst = capture
|
||||
if current_header >= 425993:
|
||||
lst.append(headerLast)
|
||||
current_header = current_header + 1
|
||||
else:
|
||||
if len(lst)>6:
|
||||
headerFirst = lst[0]
|
||||
target = self.get_target(current_header - 1, headerLast, headerFirst)
|
||||
try:
|
||||
expected_header_hash = self.get_hash(current_header)
|
||||
except MissingHeader:
|
||||
expected_header_hash = None
|
||||
raw_header = data[i * HEADER_SIZE: (i + 1) * HEADER_SIZE]
|
||||
header = deserialize_header(raw_header, current_header)
|
||||
self.verify_header(header, prev_hash, target, expected_header_hash)
|
||||
prev_hash = hash_header(header)
|
||||
headerLast = header
|
||||
lst.append(header)
|
||||
if len(lst)>7:
|
||||
lst.pop(0)
|
||||
current_header = current_header + 1
|
||||
|
||||
    @with_lock
    def path(self):
        """Filesystem path of this chain's headers file.

        The main chain lives in 'blockchain_headers'; forks live under
        'forks/' with their heights and (zero-stripped) hashes encoded in
        the filename, matching what read_blockchains() parses.
        """
        d = util.get_headers_dir(self.config)
        if self.parent is None:
            filename = 'blockchain_headers'
        else:
            assert self.forkpoint > 0, self.forkpoint
            # leading zeroes are stripped to keep filenames short
            prev_hash = self._prev_hash.lstrip('0')
            first_hash = self._forkpoint_hash.lstrip('0')
            basename = f'fork2_{self.forkpoint}_{prev_hash}_{first_hash}'
            filename = os.path.join('forks', basename)
        return os.path.join(d, filename)
|
||||
|
||||
    @with_lock
    def save_chunk(self, index: int, chunk: bytes):
        """Persist chunk *index* (raw concatenated headers) to disk.

        Chunks inside the checkpoint region are delegated to the main
        chain; if the chunk straddles our forkpoint, only the part from
        the forkpoint onward is written here.
        """
        assert index >= 0, index
        chunk_within_checkpoint_region = index < len(self.checkpoints)
        # chunks in checkpoint region are the responsibility of the 'main chain'
        if chunk_within_checkpoint_region and self.parent is not None:
            main_chain = get_best_chain()
            main_chain.save_chunk(index, chunk)
            return

        delta_height = (index * 2016 - self.forkpoint)
        delta_bytes = delta_height * HEADER_SIZE
        # if this chunk contains our forkpoint, only save the part after forkpoint
        # (the part before is the responsibility of the parent)
        if delta_bytes < 0:
            chunk = chunk[-delta_bytes:]
            delta_bytes = 0
        truncate = not chunk_within_checkpoint_region
        self.write(chunk, delta_bytes, truncate)
        self.swap_with_parent()
|
||||
|
||||
def swap_with_parent(self) -> None:
|
||||
parent_lock = self.parent.lock if self.parent is not None else threading.Lock()
|
||||
with parent_lock, self.lock, blockchains_lock: # this order should not deadlock
|
||||
# do the swap; possibly multiple ones
|
||||
cnt = 0
|
||||
while self._swap_with_parent():
|
||||
cnt += 1
|
||||
if cnt > len(blockchains): # make sure we are making progress
|
||||
raise Exception(f'swapping fork with parent too many times: {cnt}')
|
||||
|
||||
    def _swap_with_parent(self) -> bool:
        """Check if this chain became stronger than its parent, and swap
        the underlying files if so. The Blockchain instances will keep
        'containing' the same headers, but their ids change and so
        they will be stored in different files.

        Returns True if a swap happened, False otherwise. Caller must hold
        self.lock, the parent's lock and blockchains_lock (see
        swap_with_parent).
        """
        if self.parent is None:
            return False
        if self.parent.get_chainwork() >= self.get_chainwork():
            return False
        self.print_error("swap", self.forkpoint, self.parent.forkpoint)
        parent_branch_size = self.parent.height() - self.forkpoint + 1
        forkpoint = self.forkpoint  # type: Optional[int]
        parent = self.parent  # type: Optional[Blockchain]
        child_old_id = self.get_id()
        parent_old_id = parent.get_id()
        # swap files
        # child takes parent's name
        # parent's new name will be something new (not child's old name)
        self.assert_headers_file_available(self.path())
        child_old_name = self.path()
        with open(self.path(), 'rb') as f:
            my_data = f.read()
        self.assert_headers_file_available(parent.path())
        with open(parent.path(), 'rb') as f:
            # read the parent's headers above our forkpoint -- the part we take over
            f.seek((forkpoint - parent.forkpoint)*HEADER_SIZE)
            parent_data = f.read(parent_branch_size*HEADER_SIZE)
        self.write(parent_data, 0)
        parent.write(my_data, (forkpoint - parent.forkpoint)*HEADER_SIZE)
        # swap parameters
        self.parent, parent.parent = parent.parent, self  # type: Optional[Blockchain], Optional[Blockchain]
        self.forkpoint, parent.forkpoint = parent.forkpoint, self.forkpoint
        self._forkpoint_hash, parent._forkpoint_hash = parent._forkpoint_hash, hash_raw_header(bh2u(parent_data[:HEADER_SIZE]))
        self._prev_hash, parent._prev_hash = parent._prev_hash, self._prev_hash
        # parent's new name
        os.replace(child_old_name, parent.path())
        self.update_size()
        parent.update_size()
        # update pointers
        blockchains.pop(child_old_id, None)
        blockchains.pop(parent_old_id, None)
        blockchains[self.get_id()] = self
        blockchains[parent.get_id()] = parent
        return True
|
||||
|
||||
    def get_id(self) -> str:
        """Unique id of this chain: the block hash at its forkpoint."""
        return self._forkpoint_hash
|
||||
|
||||
def assert_headers_file_available(self, path):
|
||||
if os.path.exists(path):
|
||||
return
|
||||
elif not os.path.exists(util.get_headers_dir(self.config)):
|
||||
raise FileNotFoundError('Electrum headers_dir does not exist. Was it deleted while running?')
|
||||
else:
|
||||
raise FileNotFoundError('Cannot find headers file but headers_dir is there. Should be at {}'.format(path))
|
||||
|
||||
    @with_lock
    def write(self, data: bytes, offset: int, truncate: bool=True) -> None:
        """Write *data* into the headers file at byte *offset*.

        If *truncate* is set and we are not appending exactly at the
        current end of file, everything past *offset* is discarded first.
        """
        filename = self.path()
        self.assert_headers_file_available(filename)
        with open(filename, 'rb+') as f:
            if truncate and offset != self._size * HEADER_SIZE:
                f.seek(offset)
                f.truncate()
            f.seek(offset)
            f.write(data)
            f.flush()
            # ensure headers hit the disk before callers rely on them
            os.fsync(f.fileno())
        self.update_size()
|
||||
|
||||
    @with_lock
    def save_header(self, header: dict) -> None:
        """Append a single header at the end of this chain's file and
        re-evaluate whether the chain should swap with its parent."""
        delta = header.get('block_height') - self.forkpoint
        data = bfh(serialize_header(header))
        # headers are only _appended_ to the end:
        assert delta == self.size(), (delta, self.size())
        assert len(data) == HEADER_SIZE
        self.write(data, delta*HEADER_SIZE)
        self.swap_with_parent()
|
||||
|
||||
    @with_lock
    def read_header(self, height: int) -> Optional[dict]:
        """Read the header at *height*.

        Heights below our forkpoint are delegated to the parent chain.
        Returns None for out-of-range heights or an all-zero slot on disk;
        raises if the file is truncated mid-header.
        """
        if height < 0:
            return
        if height < self.forkpoint:
            return self.parent.read_header(height)
        if height > self.height():
            return
        delta = height - self.forkpoint
        name = self.path()
        self.assert_headers_file_available(name)
        with open(name, 'rb') as f:
            f.seek(delta * HEADER_SIZE)
            h = f.read(HEADER_SIZE)
            if len(h) < HEADER_SIZE:
                raise Exception('Expected to read a full header. This was only {} bytes'.format(len(h)))
        # an all-zeroes slot means the header was never written
        if h == bytes([0])*HEADER_SIZE:
            return None
        return deserialize_header(h, height)
|
||||
|
||||
def header_at_tip(self) -> Optional[dict]:
|
||||
"""Return latest header."""
|
||||
height = self.height()
|
||||
return self.read_header(height)
|
||||
|
||||
def get_hash(self, height: int) -> str:
|
||||
def is_height_checkpoint():
|
||||
within_cp_range = height <= constants.net.max_checkpoint()
|
||||
at_chunk_boundary = (height+1) % 2016 == 0
|
||||
return within_cp_range and at_chunk_boundary
|
||||
|
||||
if height == -1:
|
||||
return '0000000000000000000000000000000000000000000000000000000000000000'
|
||||
elif height == 0:
|
||||
return constants.net.GENESIS
|
||||
elif is_height_checkpoint():
|
||||
index = height // 2016
|
||||
h, t = self.checkpoints[index]
|
||||
return h
|
||||
else:
|
||||
header = self.read_header(height)
|
||||
if header is None:
|
||||
raise MissingHeader(height)
|
||||
return hash_header(header)
|
||||
|
||||
    def get_target(self, index: int, headerLast: dict=None, headerFirst: dict=None) -> int:
        """Return the expected compact difficulty for the block after *index*,
        using this fork's averaging-window retarget rules.

        headerLast/headerFirst may be supplied (by verify_chunk) to avoid
        re-reading headers from disk.

        NOTE(review): despite the name, the return value is in compact
        "bits" form (see the target_to_bits call at the end), and
        verify_header compares it directly against header['bits'].
        """
        if constants.net.TESTNET:
            return 0
        # The range is first 90 blocks because FLO's block time was 90 blocks when it started
        if -1 <= index <= 88:
            return 0x1e0ffff0
        if index < len(self.checkpoints):
            h, t = self.checkpoints[index]
            return t
        # new target
        if headerLast is None:
            headerLast = self.read_header(index)
        height = headerLast["block_height"]
        # check if the height passes is in range for retargeting
        if (height + 1) % self.DifficultyAdjustmentInterval(height + 1) != 0:
            return int(headerLast["bits"])
        if headerFirst is None:
            averagingInterval = self.AveragingInterval(height + 1)
            blockstogoback = averagingInterval - 1
            if (height + 1) != averagingInterval:
                blockstogoback = averagingInterval
            firstHeight = height - blockstogoback
            headerFirst = self.read_header(int(firstHeight))

        firstBlockTime = headerFirst["timestamp"]
        nMinActualTimespan = int(self.MinActualTimespan(int(headerLast["block_height"]) + 1))
        nMaxActualTimespan = int(self.MaxActualTimespan(int(headerLast["block_height"]) + 1))
        # Limit adjustment step
        nActualTimespan = headerLast["timestamp"] - firstBlockTime
        if nActualTimespan < nMinActualTimespan:
            nActualTimespan = nMinActualTimespan
        if nActualTimespan > nMaxActualTimespan:
            nActualTimespan = nMaxActualTimespan
        # Retarget
        bnNewBits = int(headerLast["bits"])
        bnNew = self.bits_to_target(bnNewBits)
        bnOld = bnNew
        # FLO: intermediate uint256 can overflow by 1 bit
        # const arith_uint256 bnPowLimit = UintToArith256(params.powLimit);
        fShift = bnNew > MAX_TARGET - 1

        if (fShift):
            bnNew = bnNew >> 1
        bnNew = bnNew * nActualTimespan
        # NOTE(review): true division yields a float (the C++ reference uses
        # integer division); float precision is 53 bits but only ~24 bits
        # survive target_to_bits. Also, if fShift is set the `bnNew << 1`
        # below would raise TypeError on a float -- confirm before changing.
        bnNew = bnNew / self.TargetTimespan(headerLast["block_height"] + 1)
        if fShift:
            bnNew = bnNew << 1
        if bnNew > MAX_TARGET:
            bnNew = MAX_TARGET
        bnNew = self.target_to_bits(int(bnNew))
        return bnNew
|
||||
|
||||
@classmethod
|
||||
def bits_to_target(cls, bits: int) -> int:
|
||||
bitsN = (bits >> 24) & 0xff
|
||||
if not (0x03 <= bitsN <= 0x1e):
|
||||
raise Exception("First part of bits should be in [0x03, 0x1e]")
|
||||
bitsBase = bits & 0xffffff
|
||||
if not (0x8000 <= bitsBase <= 0x7fffff):
|
||||
raise Exception("Second part of bits should be in [0x8000, 0x7fffff]")
|
||||
return bitsBase << (8 * (bitsN - 3))
|
||||
|
||||
@classmethod
|
||||
def target_to_bits(cls, target: int) -> int:
|
||||
c = ("%064x" % target)[2:]
|
||||
while c[:2] == '00' and len(c) > 6:
|
||||
c = c[2:]
|
||||
bitsN, bitsBase = len(c) // 2, int.from_bytes(bfh(c[:6]), byteorder='big')
|
||||
if bitsBase >= 0x800000:
|
||||
bitsN += 1
|
||||
bitsBase >>= 8
|
||||
return bitsN << 24 | bitsBase
|
||||
|
||||
    def chainwork_of_header_at_height(self, height: int) -> int:
        """work done by single header at given height

        NOTE(review): in this codebase get_target() returns compact "bits",
        yet the formula below treats the value as a full 256-bit target;
        the resulting numbers are only used for relative comparisons in
        _swap_with_parent -- confirm before relying on absolute values.
        """
        chunk_idx = height // 2016 - 1
        target = self.get_target(chunk_idx)
        work = ((2 ** 256 - target - 1) // (target + 1)) + 1
        return work
|
||||
|
||||
    @with_lock
    def get_chainwork(self, height=None) -> int:
        """Cumulative chain work up to and including *height* (default: tip).

        Caches running totals in _CHAINWORK_CACHE, keyed by the block hash
        at each 2016-block retarget boundary.
        """
        if height is None:
            height = max(0, self.height())
        if constants.net.TESTNET:
            # On testnet/regtest, difficulty works somewhat different.
            # It's out of scope to properly implement that.
            return height
        last_retarget = height // 2016 * 2016 - 1
        cached_height = last_retarget
        # walk back to the nearest cached boundary (or the virtual block at -1)
        while _CHAINWORK_CACHE.get(self.get_hash(cached_height)) is None:
            if cached_height <= -1:
                break
            cached_height -= 2016
        assert cached_height >= -1, cached_height
        running_total = _CHAINWORK_CACHE[self.get_hash(cached_height)]
        # roll forward chunk by chunk, filling the cache as we go
        while cached_height < last_retarget:
            cached_height += 2016
            work_in_single_header = self.chainwork_of_header_at_height(cached_height)
            work_in_chunk = 2016 * work_in_single_header
            running_total += work_in_chunk
            _CHAINWORK_CACHE[self.get_hash(cached_height)] = running_total
        cached_height += 2016
        # add the partial chunk between the last retarget and *height*
        work_in_single_header = self.chainwork_of_header_at_height(cached_height)
        work_in_last_partial_chunk = (height % 2016 + 1) * work_in_single_header
        return running_total + work_in_last_partial_chunk
|
||||
|
||||
def can_connect(self, header: dict, check_height: bool=True) -> bool:
|
||||
if header is None:
|
||||
return False
|
||||
height = header['block_height']
|
||||
if check_height and self.height() != height - 1:
|
||||
#self.print_error("cannot connect at height", height)
|
||||
return False
|
||||
if height == 0:
|
||||
return hash_header(header) == constants.net.GENESIS
|
||||
try:
|
||||
prev_hash = self.get_hash(height - 1)
|
||||
except:
|
||||
return False
|
||||
if prev_hash != header.get('prev_block_hash'):
|
||||
return False
|
||||
try:
|
||||
target = self.get_target(height - 1)
|
||||
except MissingHeader:
|
||||
return False
|
||||
try:
|
||||
self.verify_header(header, prev_hash, target)
|
||||
except BaseException as e:
|
||||
return False
|
||||
return True
|
||||
|
||||
def connect_chunk(self, idx: int, hexdata: str) -> bool:
|
||||
assert idx >= 0, idx
|
||||
try:
|
||||
data = bfh(hexdata)
|
||||
self.verify_chunk(idx, data)
|
||||
#self.print_error("validated chunk %d" % idx)
|
||||
self.save_chunk(idx, data)
|
||||
return True
|
||||
except BaseException as e:
|
||||
self.print_error(f'verify_chunk idx {idx} failed: {repr(e)}')
|
||||
return False
|
||||
|
||||
def get_checkpoints(self):
|
||||
# for each chunk, store the hash of the last block and the target after the chunk
|
||||
cp = []
|
||||
n = self.height() // 2016
|
||||
for index in range(n):
|
||||
h = self.get_hash((index+1) * 2016 -1)
|
||||
target = self.get_target(index)
|
||||
cp.append((h, target))
|
||||
return cp
|
||||
|
||||
def AveragingInterval(self, height):
|
||||
# V1
|
||||
if height < constants.net.nHeight_Difficulty_Version2:
|
||||
return constants.net.nAveragingInterval_Version1
|
||||
# V2
|
||||
elif height < constants.net.nHeight_Difficulty_Version3:
|
||||
return constants.net.nAveragingInterval_Version2
|
||||
# V3
|
||||
else:
|
||||
return constants.net.nAveragingInterval_Version3
|
||||
|
||||
def MinActualTimespan(self, height):
|
||||
averagingTargetTimespan = self.AveragingInterval(height) * constants.net.nPowTargetSpacing
|
||||
# V1
|
||||
if height < constants.net.nHeight_Difficulty_Version2:
|
||||
return int(averagingTargetTimespan * (100 - constants.net.nMaxAdjustUp_Version1) / 100)
|
||||
# V2
|
||||
elif height < constants.net.nHeight_Difficulty_Version3:
|
||||
return int(averagingTargetTimespan * (100 - constants.net.nMaxAdjustUp_Version2) / 100)
|
||||
# V3
|
||||
else:
|
||||
return int(averagingTargetTimespan * (100 - constants.net.nMaxAdjustUp_Version3) / 100)
|
||||
|
||||
def MaxActualTimespan(self, height):
|
||||
averagingTargetTimespan = self.AveragingInterval(height) * constants.net.nPowTargetSpacing
|
||||
# V1
|
||||
if height < constants.net.nHeight_Difficulty_Version2:
|
||||
return int(averagingTargetTimespan * (100 + constants.net.nMaxAdjustDown_Version1) / 100)
|
||||
# V2
|
||||
elif height < constants.net.nHeight_Difficulty_Version3:
|
||||
return int(averagingTargetTimespan * (100 + constants.net.nMaxAdjustDown_Version2) / 100)
|
||||
# V3
|
||||
else:
|
||||
return int(averagingTargetTimespan * (100 + constants.net.nMaxAdjustDown_Version3) / 100)
|
||||
|
||||
    def TargetTimespan(self, height):
        """Ideal duration of one averaging window at *height*."""
        # V1: fixed constant
        if height < constants.net.nHeight_Difficulty_Version2:
            return constants.net.nTargetTimespan_Version1
        # V2: window size times the target block spacing
        if height < constants.net.nHeight_Difficulty_Version3:
            return constants.net.nAveragingInterval_Version2 * constants.net.nPowTargetSpacing
        # V3
        return constants.net.nAveragingInterval_Version3 * constants.net.nPowTargetSpacing
|
||||
|
||||
    def DifficultyAdjustmentInterval(self, height):
        """Blocks between difficulty retargets at *height*, per algo version."""
        # V1
        if height < constants.net.nHeight_Difficulty_Version2:
            return constants.net.nInterval_Version1
        # V2
        if height < constants.net.nHeight_Difficulty_Version3:
            return constants.net.nInterval_Version2
        # V3
        return constants.net.nInterval_Version3
|
||||
|
||||
|
||||
def check_header(header: dict) -> Optional[Blockchain]:
    """Return the first known chain whose stored block matches *header*,
    or None if no chain matches."""
    if type(header) is not dict:
        return None
    with blockchains_lock:
        chains = list(blockchains.values())
    for chain in chains:
        if chain.check_header(header):
            return chain
    return None
|
||||
|
||||
|
||||
def can_connect(header: dict) -> Optional[Blockchain]:
    """Return the first known chain that *header* can extend, or None."""
    with blockchains_lock:
        chains = list(blockchains.values())
    for chain in chains:
        if chain.can_connect(header):
            return chain
    return None
|
||||
@ -1,3 +0,0 @@
|
||||
[
|
||||
|
||||
]
|
||||
@ -1,3 +0,0 @@
|
||||
[
|
||||
|
||||
]
|
||||
@ -1,216 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2018 The Electrum developers
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import base64
|
||||
import os
|
||||
import hashlib
|
||||
import hmac
|
||||
from typing import Union
|
||||
|
||||
import pyaes
|
||||
|
||||
from .util import assert_bytes, InvalidPassword, to_bytes, to_string, WalletFileException
|
||||
from .i18n import _
|
||||
|
||||
|
||||
try:
|
||||
from Cryptodome.Cipher import AES
|
||||
except:
|
||||
AES = None
|
||||
|
||||
|
||||
class InvalidPadding(Exception):
    """Raised when PKCS#7 padding of a decrypted plaintext is malformed."""
    pass
|
||||
|
||||
|
||||
def append_PKCS7_padding(data: bytes) -> bytes:
    """Pad *data* to a multiple of 16 bytes per PKCS#7."""
    assert_bytes(data)
    pad_len = 16 - (len(data) % 16)
    return data + bytes([pad_len] * pad_len)
|
||||
|
||||
|
||||
def strip_PKCS7_padding(data: bytes) -> bytes:
    """Validate and remove PKCS#7 padding.

    Raises:
        InvalidPadding: if the length or padding bytes are malformed.
    """
    assert_bytes(data)
    if not data or len(data) % 16 != 0:
        raise InvalidPadding("invalid length")
    pad_len = data[-1]
    if not (0 < pad_len <= 16):
        raise InvalidPadding("invalid padding byte (out of range)")
    if any(b != pad_len for b in data[-pad_len:]):
        raise InvalidPadding("invalid padding byte (inconsistent)")
    return data[0:-pad_len]
|
||||
|
||||
|
||||
def aes_encrypt_with_iv(key: bytes, iv: bytes, data: bytes) -> bytes:
    """AES-CBC encrypt *data* (PKCS#7-padded first) with *key* and *iv*.

    Uses PyCryptodome's AES when it imported successfully at module load,
    otherwise falls back to the pure-python pyaes implementation.
    """
    assert_bytes(key, iv, data)
    data = append_PKCS7_padding(data)
    if AES:
        e = AES.new(key, AES.MODE_CBC, iv).encrypt(data)
    else:
        aes_cbc = pyaes.AESModeOfOperationCBC(key, iv=iv)
        aes = pyaes.Encrypter(aes_cbc, padding=pyaes.PADDING_NONE)
        e = aes.feed(data) + aes.feed()  # empty aes.feed() flushes buffer
    return e
|
||||
|
||||
|
||||
def aes_decrypt_with_iv(key: bytes, iv: bytes, data: bytes) -> bytes:
    """AES-CBC decrypt *data* and strip PKCS#7 padding.

    Raises:
        InvalidPassword: if the padding is invalid (typically a wrong key).
    """
    assert_bytes(key, iv, data)
    if AES:
        cipher = AES.new(key, AES.MODE_CBC, iv)
        data = cipher.decrypt(data)
    else:
        aes_cbc = pyaes.AESModeOfOperationCBC(key, iv=iv)
        aes = pyaes.Decrypter(aes_cbc, padding=pyaes.PADDING_NONE)
        data = aes.feed(data) + aes.feed()  # empty aes.feed() flushes buffer
    try:
        return strip_PKCS7_padding(data)
    except InvalidPadding:
        # bad padding almost always means the wrong password/key
        raise InvalidPassword()
|
||||
|
||||
|
||||
def EncodeAES_base64(secret: bytes, msg: bytes) -> bytes:
    """Returns base64 encoded ciphertext."""
    return base64.b64encode(EncodeAES_bytes(secret, msg))
|
||||
|
||||
|
||||
def EncodeAES_bytes(secret: bytes, msg: bytes) -> bytes:
    """Encrypt *msg* under *secret* with a fresh random IV; return iv||ciphertext."""
    assert_bytes(msg)
    iv = bytes(os.urandom(16))
    return iv + aes_encrypt_with_iv(secret, iv, msg)
|
||||
|
||||
|
||||
def DecodeAES_base64(secret: bytes, ciphertext_b64: Union[bytes, str]) -> bytes:
    """Decrypt base64-encoded iv||ciphertext (as produced by EncodeAES_base64)."""
    raw = bytes(base64.b64decode(ciphertext_b64))
    return DecodeAES_bytes(secret, raw)
|
||||
|
||||
|
||||
def DecodeAES_bytes(secret: bytes, ciphertext: bytes) -> bytes:
    """Inverse of EncodeAES_bytes: split off the 16-byte IV and decrypt."""
    assert_bytes(ciphertext)
    iv = ciphertext[:16]
    payload = ciphertext[16:]
    return aes_decrypt_with_iv(secret, iv, payload)
|
||||
|
||||
|
||||
# Password-hashing scheme versions used for wallet encryption.
PW_HASH_VERSION_LATEST = 1        # version used when encrypting new data
KNOWN_PW_HASH_VERSIONS = (1, 2, )      # versions this codebase knows exist
SUPPORTED_PW_HASH_VERSIONS = (1, )     # versions this build can actually compute
# Sanity checks: the latest version must be both known and supported.
assert PW_HASH_VERSION_LATEST in KNOWN_PW_HASH_VERSIONS
assert PW_HASH_VERSION_LATEST in SUPPORTED_PW_HASH_VERSIONS
|
||||
|
||||
|
||||
class UnexpectedPasswordHashVersion(InvalidPassword, WalletFileException):
    """Raised when a wallet declares a password-hash version this code
    does not know about (i.e. not in KNOWN_PW_HASH_VERSIONS)."""

    def __init__(self, version):
        self.version = version

    def __str__(self):
        return (f"{_('Unexpected password hash version')}: {self.version}\n"
                + _('You are most likely using an outdated version of Electrum. Please update.'))
|
||||
|
||||
|
||||
class UnsupportedPasswordHashVersion(InvalidPassword, WalletFileException):
    """Raised when a wallet declares a known password-hash version that this
    particular build cannot compute (not in SUPPORTED_PW_HASH_VERSIONS)."""

    def __init__(self, version):
        self.version = version

    def __str__(self):
        return (f"{_('Unsupported password hash version')}: {self.version}\n"
                f"To open this wallet, try 'git checkout password_v{self.version}'.\n"
                "Alternatively, restore from seed.")
|
||||
|
||||
|
||||
def _hash_password(password: Union[bytes, str], *, version: int) -> bytes:
    """Derive the symmetric encryption key for *password* under hash
    scheme *version*.

    Raises UnsupportedPasswordHashVersion / UnexpectedPasswordHashVersion
    for versions this build cannot handle.
    """
    pw = to_bytes(password, 'utf8')
    if version not in SUPPORTED_PW_HASH_VERSIONS:
        raise UnsupportedPasswordHashVersion(version)
    if version == 1:
        return sha256d(pw)
    # Unreachable for currently supported versions; guards against the
    # version tuples above being edited inconsistently.
    assert version not in KNOWN_PW_HASH_VERSIONS
    raise UnexpectedPasswordHashVersion(version)
|
||||
|
||||
|
||||
def pw_encode(data: str, password: Union[bytes, str, None], *, version: int) -> str:
    """Encrypt *data* with *password* and return base64 text.

    An empty/None password is a no-op: *data* is returned unchanged.
    """
    if not password:
        return data
    if version not in KNOWN_PW_HASH_VERSIONS:
        raise UnexpectedPasswordHashVersion(version)
    # derive key from password
    secret = _hash_password(password, version=version)
    # encrypt given data
    encrypted = EncodeAES_bytes(secret, to_bytes(data, "utf8"))
    return base64.b64encode(encrypted).decode('utf8')
|
||||
|
||||
|
||||
def pw_decode(data: str, password: Union[bytes, str, None], *, version: int) -> str:
    """Inverse of pw_encode.

    A None password is a no-op: *data* is returned unchanged.
    Raises InvalidPassword when decryption fails for any reason.
    """
    if password is None:
        return data
    if version not in KNOWN_PW_HASH_VERSIONS:
        raise UnexpectedPasswordHashVersion(version)
    raw = bytes(base64.b64decode(data))
    # derive key from password
    secret = _hash_password(password, version=version)
    # decrypt given data; any failure (bad padding, bad utf8, ...) means
    # the password was wrong
    try:
        return to_string(DecodeAES_bytes(secret, raw), "utf8")
    except Exception as e:
        raise InvalidPassword() from e
|
||||
|
||||
|
||||
def sha256(x: Union[bytes, str]) -> bytes:
    """Single SHA-256 of *x* (str input is utf8-encoded first)."""
    return bytes(hashlib.sha256(to_bytes(x, 'utf8')).digest())
|
||||
|
||||
|
||||
def sha256d(x: Union[bytes, str]) -> bytes:
    """Double SHA-256, i.e. sha256(sha256(x)), as used throughout Bitcoin."""
    return bytes(sha256(sha256(to_bytes(x, 'utf8'))))
|
||||
|
||||
|
||||
def hash_160(x: bytes) -> bytes:
    """Return RIPEMD160(SHA256(x)), the standard Bitcoin "hash160".

    Uses OpenSSL's ripemd160 via hashlib when available, otherwise falls
    back to the bundled pure-python implementation.
    """
    sha = sha256(x)
    try:
        md = hashlib.new('ripemd160')
    except ValueError:
        # hashlib.new raises ValueError when the OpenSSL build lacks
        # ripemd160. The original code caught BaseException here, which
        # would also swallow KeyboardInterrupt/SystemExit and wrapped the
        # non-raising update()/digest() calls as well.
        from . import ripemd
        return ripemd.new(sha).digest()
    md.update(sha)
    return md.digest()
|
||||
|
||||
|
||||
def hmac_oneshot(key: bytes, msg: bytes, digest) -> bytes:
    """Compute HMAC(key, msg) for a single message.

    Prefers hmac.digest (python 3.7+), which is faster for one-shot use;
    falls back to the classic hmac.new(...).digest().
    """
    fast_path = getattr(hmac, 'digest', None)
    if fast_path is not None:
        return fast_path(key, msg, digest)
    return hmac.new(key, msg, digest).digest()
|
||||
@ -1,44 +0,0 @@
|
||||
{
|
||||
"CoinMarketcap": [
|
||||
"AED",
|
||||
"ALL",
|
||||
"ARS",
|
||||
"AUD",
|
||||
"BHD",
|
||||
"BOB",
|
||||
"BRL",
|
||||
"KHR",
|
||||
"CAD",
|
||||
"CLP",
|
||||
"CNY",
|
||||
"COP",
|
||||
"CUP",
|
||||
"CZK",
|
||||
"EGP",
|
||||
"EUR",
|
||||
"HKD",
|
||||
"HUF",
|
||||
"ISK",
|
||||
"INR",
|
||||
"IDR",
|
||||
"IQD",
|
||||
"ILS",
|
||||
"JPY",
|
||||
"LBP",
|
||||
"MYR",
|
||||
"MXN",
|
||||
"NPR",
|
||||
"NZD",
|
||||
"NGN",
|
||||
"NOK",
|
||||
"GBP",
|
||||
"QAR",
|
||||
"RUB",
|
||||
"SGD",
|
||||
"SEK",
|
||||
"CHF",
|
||||
"THB",
|
||||
"USD",
|
||||
"VND"
|
||||
]
|
||||
}
|
||||
@ -1 +0,0 @@
|
||||
../run_electrum
|
||||
|
Before Width: | Height: | Size: 687 B |
|
Before Width: | Height: | Size: 1.3 KiB |
|
Before Width: | Height: | Size: 7.4 KiB |
|
Before Width: | Height: | Size: 8.3 KiB |
|
Before Width: | Height: | Size: 7.1 KiB |
|
Before Width: | Height: | Size: 7.3 KiB |
|
Before Width: | Height: | Size: 9.8 KiB |
|
Before Width: | Height: | Size: 528 B |
|
Before Width: | Height: | Size: 788 B |
|
Before Width: | Height: | Size: 39 KiB |
|
Before Width: | Height: | Size: 15 KiB |
|
Before Width: | Height: | Size: 15 KiB |
|
Before Width: | Height: | Size: 26 KiB |
|
Before Width: | Height: | Size: 144 KiB |
|
Before Width: | Height: | Size: 3.8 KiB |
|
Before Width: | Height: | Size: 3.6 KiB |
|
Before Width: | Height: | Size: 62 KiB |
|
Before Width: | Height: | Size: 60 KiB |
|
Before Width: | Height: | Size: 62 KiB |
|
Before Width: | Height: | Size: 1.5 KiB |
@ -1,90 +0,0 @@
|
||||
# Kivy GUI
|
||||
|
||||
The Kivy GUI is used with Electrum on Android devices.
|
||||
To generate an APK file, follow these instructions.
|
||||
|
||||
## Android binary with Docker
|
||||
|
||||
This assumes an Ubuntu host, but it should not be too hard to adapt to another
|
||||
similar system. The docker commands should be executed in the project's root
|
||||
folder.
|
||||
|
||||
1. Install Docker
|
||||
|
||||
```
|
||||
$ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
|
||||
$ sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
|
||||
$ sudo apt-get update
|
||||
$ sudo apt-get install -y docker-ce
|
||||
```
|
||||
|
||||
2. Build image
|
||||
|
||||
```
|
||||
$ sudo docker build -t electrum-android-builder-img electrum/gui/kivy/tools
|
||||
```
|
||||
|
||||
3. Build locale files
|
||||
|
||||
```
|
||||
$ ./contrib/make_locale
|
||||
```
|
||||
|
||||
4. Prepare pure python dependencies
|
||||
|
||||
```
|
||||
$ ./contrib/make_packages
|
||||
```
|
||||
|
||||
5. Build binaries
|
||||
|
||||
```
|
||||
$ sudo docker run -it --rm \
|
||||
--name electrum-android-builder-cont \
|
||||
-v $PWD:/home/user/wspace/electrum \
|
||||
-v ~/.keystore:/home/user/.keystore \
|
||||
--workdir /home/user/wspace/electrum \
|
||||
electrum-android-builder-img \
|
||||
./contrib/make_apk
|
||||
```
|
||||
This mounts the project dir inside the container,
|
||||
and so the modifications will affect it, e.g. `.buildozer` folder
|
||||
will be created.
|
||||
|
||||
6. The generated binary is in `./bin`.
|
||||
|
||||
|
||||
|
||||
## FAQ
|
||||
|
||||
### I changed something but I don't see any differences on the phone. What did I do wrong?
|
||||
You probably need to clear the cache: `rm -rf .buildozer/android/platform/build/{build,dists}`
|
||||
|
||||
|
||||
### How do I deploy on connected phone for quick testing?
|
||||
Assuming `adb` is installed:
|
||||
```
|
||||
$ adb -d install -r bin/Electrum-*-debug.apk
|
||||
$ adb shell monkey -p org.electrum.electrum 1
|
||||
```
|
||||
|
||||
|
||||
### How do I get an interactive shell inside docker?
|
||||
```
|
||||
$ sudo docker run -it --rm \
|
||||
-v $PWD:/home/user/wspace/electrum \
|
||||
--workdir /home/user/wspace/electrum \
|
||||
electrum-android-builder-img
|
||||
```
|
||||
|
||||
|
||||
### How do I get more verbose logs?
|
||||
See `log_level` in `buildozer.spec`
|
||||
|
||||
|
||||
### Kivy can be run directly on Linux Desktop. How?
|
||||
Install Kivy.
|
||||
|
||||
Build atlas: `(cd electrum/gui/kivy/; make theming)`
|
||||
|
||||
Run electrum with the `-g` switch: `electrum -g kivy`
|
||||
@ -1,89 +0,0 @@
|
||||
package org.electrum.qr;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.os.Bundle;
|
||||
import android.util.Log;
|
||||
import android.content.Intent;
|
||||
import android.support.v4.app.ActivityCompat;
|
||||
import android.Manifest;
|
||||
import android.content.pm.PackageManager;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import me.dm7.barcodescanner.zxing.ZXingScannerView;
|
||||
|
||||
import com.google.zxing.Result;
|
||||
import com.google.zxing.BarcodeFormat;
|
||||
|
||||
/**
 * Full-screen QR-code scanner activity. Shows a ZXing camera preview,
 * decodes the first QR code seen, and returns its text and barcode format
 * to the calling activity via the result Intent ("text" / "format" extras).
 * Handles the runtime CAMERA permission request itself.
 */
public class SimpleScannerActivity extends Activity implements ZXingScannerView.ResultHandler {
    // Arbitrary request code identifying our camera-permission request
    // in onRequestPermissionsResult.
    private static final int MY_PERMISSIONS_CAMERA = 1002;

    // Created lazily in startCamera(); null until permission is granted.
    private ZXingScannerView mScannerView = null;
    final String TAG = "org.electrum.SimpleScannerActivity";

    @Override
    public void onResume() {
        super.onResume();
        // Camera may only be started once the runtime permission is granted;
        // otherwise ask for it and start the camera in the permission callback.
        if (this.hasPermission()) {
            this.startCamera();
        } else {
            this.requestPermission();
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        if (null != mScannerView) {
            mScannerView.stopCamera(); // Stop camera on pause
        }
    }

    /** Create the scanner view, restrict it to QR codes, and start the preview. */
    private void startCamera() {
        mScannerView = new ZXingScannerView(this); // Programmatically initialize the scanner view
        mScannerView.setFormats(Arrays.asList(BarcodeFormat.QR_CODE));
        setContentView(mScannerView); // Set the scanner view as the content view
        mScannerView.setResultHandler(this); // Register ourselves as a handler for scan results.
        mScannerView.startCamera(); // Start camera on resume
    }

    /** Called by ZXing on a successful decode: return the payload and finish. */
    @Override
    public void handleResult(Result rawResult) {
        Intent resultIntent = new Intent();
        resultIntent.putExtra("text", rawResult.getText());
        resultIntent.putExtra("format", rawResult.getBarcodeFormat().toString());
        setResult(Activity.RESULT_OK, resultIntent);
        this.finish();
    }

    /** True iff the CAMERA runtime permission has already been granted. */
    private boolean hasPermission() {
        return (ActivityCompat.checkSelfPermission(this,
                Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED);
    }

    /** Ask the user for the CAMERA permission; answer arrives in the callback below. */
    private void requestPermission() {
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.CAMERA},
                MY_PERMISSIONS_CAMERA);
    }

    @Override
    public void onRequestPermissionsResult(int requestCode,
            String permissions[], int[] grantResults) {
        switch (requestCode) {
            case MY_PERMISSIONS_CAMERA: {
                if (grantResults.length > 0
                    && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                    // permission was granted, yay!
                    this.startCamera();
                } else {
                    // permission denied: the activity is useless without the
                    // camera, so just close it
                    this.finish();
                }
                return;
            }
        }
    }

}
|
||||
|
Before Width: | Height: | Size: 2.3 KiB |
|
Before Width: | Height: | Size: 68 KiB |
|
Before Width: | Height: | Size: 2.0 KiB |
|
Before Width: | Height: | Size: 15 KiB |
|
Before Width: | Height: | Size: 30 KiB |
@ -1,155 +0,0 @@
|
||||
# based on https://github.com/kivy/python-for-android/blob/master/Dockerfile
|
||||
|
||||
FROM ubuntu:18.04
|
||||
|
||||
ENV ANDROID_HOME="/opt/android"
|
||||
|
||||
# configure locale
|
||||
RUN apt update -qq > /dev/null && apt install -qq --yes --no-install-recommends \
|
||||
locales && \
|
||||
locale-gen en_US.UTF-8
|
||||
ENV LANG="en_US.UTF-8" \
|
||||
LANGUAGE="en_US.UTF-8" \
|
||||
LC_ALL="en_US.UTF-8"
|
||||
|
||||
RUN apt -y update -qq \
|
||||
&& apt -y install -qq --no-install-recommends curl unzip ca-certificates \
|
||||
&& apt -y autoremove
|
||||
|
||||
|
||||
ENV ANDROID_NDK_HOME="${ANDROID_HOME}/android-ndk"
|
||||
ENV ANDROID_NDK_VERSION="17c"
|
||||
ENV ANDROID_NDK_HOME_V="${ANDROID_NDK_HOME}-r${ANDROID_NDK_VERSION}"
|
||||
|
||||
# get the latest version from https://developer.android.com/ndk/downloads/index.html
|
||||
ENV ANDROID_NDK_ARCHIVE="android-ndk-r${ANDROID_NDK_VERSION}-linux-x86_64.zip"
|
||||
ENV ANDROID_NDK_DL_URL="https://dl.google.com/android/repository/${ANDROID_NDK_ARCHIVE}"
|
||||
|
||||
# download and install Android NDK
|
||||
RUN curl --location --progress-bar \
|
||||
"${ANDROID_NDK_DL_URL}" \
|
||||
--output "${ANDROID_NDK_ARCHIVE}" \
|
||||
&& mkdir --parents "${ANDROID_NDK_HOME_V}" \
|
||||
&& unzip -q "${ANDROID_NDK_ARCHIVE}" -d "${ANDROID_HOME}" \
|
||||
&& ln -sfn "${ANDROID_NDK_HOME_V}" "${ANDROID_NDK_HOME}" \
|
||||
&& rm -rf "${ANDROID_NDK_ARCHIVE}"
|
||||
|
||||
|
||||
ENV ANDROID_SDK_HOME="${ANDROID_HOME}/android-sdk"
|
||||
|
||||
# get the latest version from https://developer.android.com/studio/index.html
|
||||
ENV ANDROID_SDK_TOOLS_VERSION="4333796"
|
||||
ENV ANDROID_SDK_BUILD_TOOLS_VERSION="28.0.3"
|
||||
ENV ANDROID_SDK_TOOLS_ARCHIVE="sdk-tools-linux-${ANDROID_SDK_TOOLS_VERSION}.zip"
|
||||
ENV ANDROID_SDK_TOOLS_DL_URL="https://dl.google.com/android/repository/${ANDROID_SDK_TOOLS_ARCHIVE}"
|
||||
|
||||
# download and install Android SDK
|
||||
RUN curl --location --progress-bar \
|
||||
"${ANDROID_SDK_TOOLS_DL_URL}" \
|
||||
--output "${ANDROID_SDK_TOOLS_ARCHIVE}" \
|
||||
&& mkdir --parents "${ANDROID_SDK_HOME}" \
|
||||
&& unzip -q "${ANDROID_SDK_TOOLS_ARCHIVE}" -d "${ANDROID_SDK_HOME}" \
|
||||
&& rm -rf "${ANDROID_SDK_TOOLS_ARCHIVE}"
|
||||
|
||||
# update Android SDK, install Android API, Build Tools...
|
||||
RUN mkdir --parents "${ANDROID_SDK_HOME}/.android/" \
|
||||
&& echo '### User Sources for Android SDK Manager' \
|
||||
> "${ANDROID_SDK_HOME}/.android/repositories.cfg"
|
||||
|
||||
# accept Android licenses (JDK necessary!)
|
||||
RUN apt -y update -qq \
|
||||
&& apt -y install -qq --no-install-recommends openjdk-8-jdk \
|
||||
&& apt -y autoremove
|
||||
RUN yes | "${ANDROID_SDK_HOME}/tools/bin/sdkmanager" "build-tools;${ANDROID_SDK_BUILD_TOOLS_VERSION}" > /dev/null
|
||||
|
||||
# download platforms, API, build tools
|
||||
RUN "${ANDROID_SDK_HOME}/tools/bin/sdkmanager" "platforms;android-24" > /dev/null && \
|
||||
"${ANDROID_SDK_HOME}/tools/bin/sdkmanager" "platforms;android-28" > /dev/null && \
|
||||
"${ANDROID_SDK_HOME}/tools/bin/sdkmanager" "build-tools;${ANDROID_SDK_BUILD_TOOLS_VERSION}" > /dev/null && \
|
||||
"${ANDROID_SDK_HOME}/tools/bin/sdkmanager" "extras;android;m2repository" > /dev/null && \
|
||||
chmod +x "${ANDROID_SDK_HOME}/tools/bin/avdmanager"
|
||||
|
||||
|
||||
ENV USER="user"
|
||||
ENV HOME_DIR="/home/${USER}"
|
||||
ENV WORK_DIR="${HOME_DIR}/wspace" \
|
||||
PATH="${HOME_DIR}/.local/bin:${PATH}"
|
||||
|
||||
# install system dependencies
|
||||
RUN apt -y update -qq \
|
||||
&& apt -y install -qq --no-install-recommends \
|
||||
python3 virtualenv python3-pip python3-setuptools git wget lbzip2 patch sudo \
|
||||
software-properties-common \
|
||||
&& apt -y autoremove
|
||||
|
||||
# install kivy
|
||||
RUN add-apt-repository ppa:kivy-team/kivy \
|
||||
&& apt -y update -qq \
|
||||
&& apt -y install -qq --no-install-recommends python3-kivy \
|
||||
&& apt -y autoremove \
|
||||
&& apt -y clean
|
||||
RUN python3 -m pip install image
|
||||
|
||||
# build dependencies
|
||||
# https://buildozer.readthedocs.io/en/latest/installation.html#android-on-ubuntu-16-04-64bit
|
||||
RUN dpkg --add-architecture i386 \
|
||||
&& apt -y update -qq \
|
||||
&& apt -y install -qq --no-install-recommends \
|
||||
build-essential ccache git python3 python3-dev \
|
||||
libncurses5:i386 libstdc++6:i386 libgtk2.0-0:i386 \
|
||||
libpangox-1.0-0:i386 libpangoxft-1.0-0:i386 libidn11:i386 \
|
||||
zip zlib1g-dev zlib1g:i386 \
|
||||
&& apt -y autoremove \
|
||||
&& apt -y clean
|
||||
|
||||
# specific recipes dependencies (e.g. libffi requires autoreconf binary)
|
||||
RUN apt -y update -qq \
|
||||
&& apt -y install -qq --no-install-recommends \
|
||||
libffi-dev autoconf automake cmake gettext libltdl-dev libtool pkg-config \
|
||||
&& apt -y autoremove \
|
||||
&& apt -y clean
|
||||
|
||||
|
||||
# prepare non root env
|
||||
RUN useradd --create-home --shell /bin/bash ${USER}
|
||||
|
||||
# with sudo access and no password
|
||||
RUN usermod -append --groups sudo ${USER}
|
||||
RUN echo "%sudo ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
|
||||
|
||||
|
||||
WORKDIR ${WORK_DIR}
|
||||
|
||||
# user needs ownership/write access to these directories
|
||||
RUN chown --recursive ${USER} ${WORK_DIR} ${ANDROID_SDK_HOME}
|
||||
RUN chown ${USER} /opt
|
||||
USER ${USER}
|
||||
|
||||
|
||||
RUN python3 -m pip install --upgrade cython==0.28.6
|
||||
|
||||
# prepare git
|
||||
RUN git config --global user.name "John Doe" \
|
||||
&& git config --global user.email johndoe@example.com
|
||||
|
||||
# install buildozer
|
||||
RUN cd /opt \
|
||||
&& git clone https://github.com/kivy/buildozer \
|
||||
&& cd buildozer \
|
||||
&& git checkout 88e4a4b0c7733eec1d14c00579ec412fb59ad7f2 \
|
||||
&& python3 -m pip install -e .
|
||||
|
||||
# install python-for-android
|
||||
RUN cd /opt \
|
||||
&& git clone https://github.com/kivy/python-for-android \
|
||||
&& cd python-for-android \
|
||||
&& git remote add sombernight https://github.com/SomberNight/python-for-android \
|
||||
&& git fetch --all \
|
||||
&& git checkout dec1badc3bd134a9a1c69275339423a95d63413e \
|
||||
# allowBackup="false":
|
||||
&& git cherry-pick 86eeec7c19679a5886d5e095ce0a43f1da138f87 \
|
||||
&& python3 -m pip install -e .
|
||||
|
||||
# build env vars
|
||||
ENV USE_SDK_WRAPPER=1
|
||||
ENV GRADLE_OPTS="-Xmx1536M -Dorg.gradle.jvmargs='-Xmx1536M'"
|
||||
@ -1,65 +0,0 @@
|
||||
from kivy.app import App
|
||||
from kivy.factory import Factory
|
||||
from kivy.properties import ObjectProperty
|
||||
from kivy.lang import Builder
|
||||
from kivy.uix.textinput import TextInput
|
||||
from kivy.properties import NumericProperty
|
||||
|
||||
Builder.load_string('''
|
||||
<FLODataDialog@Popup>
|
||||
id: popup
|
||||
title: ''
|
||||
size_hint: 0.8, 0.5
|
||||
pos_hint: {'top':0.9}
|
||||
BoxLayout:
|
||||
orientation: 'vertical'
|
||||
Widget:
|
||||
size_hint: 1, 0.1
|
||||
LimitedInput:
|
||||
id:input
|
||||
padding: '5dp'
|
||||
size_hint: 1, 1
|
||||
height: '27dp'
|
||||
max_characters: 1022
|
||||
pos_hint: {'center_y':.5}
|
||||
text:''
|
||||
multiline: True
|
||||
background_normal: 'atlas://electrum/gui/kivy/theming/light/tab_btn'
|
||||
background_active: 'atlas://electrum/gui/kivy/theming/light/tab_btn'
|
||||
hint_text_color: self.foreground_color
|
||||
foreground_color: 1, 1, 1, 1
|
||||
font_size: '16dp'
|
||||
focus: True
|
||||
Widget:
|
||||
size_hint: 1, 0.2
|
||||
BoxLayout:
|
||||
orientation: 'horizontal'
|
||||
size_hint: 1, 0.5
|
||||
Button:
|
||||
text: 'Cancel'
|
||||
size_hint: 0.5, None
|
||||
height: '48dp'
|
||||
on_release: popup.dismiss()
|
||||
Button:
|
||||
text: 'OK'
|
||||
size_hint: 0.5, None
|
||||
height: '48dp'
|
||||
on_release:
|
||||
root.callback(input.text)
|
||||
popup.dismiss()
|
||||
''')
|
||||
|
||||
class LimitedInput(TextInput):
    """TextInput that caps the total text length at ``max_characters``
    (0 means unlimited)."""
    # maximum number of characters allowed in the field; 0 disables the limit
    max_characters = NumericProperty(0)

    def insert_text(self, substring, from_undo=False):
        # Fix: the original only rejected input once len(text) already
        # EXCEEDED the limit, so the limit could be overshot by one keypress
        # or by an arbitrarily long paste. Truncate the inserted substring
        # to the room remaining so the cap is never exceeded.
        if self.max_characters > 0:
            room = self.max_characters - len(self.text)
            substring = substring[:max(0, room)]
        TextInput.insert_text(self, substring, from_undo)
|
||||
|
||||
class FLODataDialog(Factory.Popup):
    """Popup (defined in the kv string above) that lets the user edit a
    piece of FLO data text and passes the result to *callback* on OK."""

    def __init__(self, title, text, callback):
        # note: relies on the kv rule registered via Builder.load_string,
        # which provides self.ids.input (a LimitedInput)
        Factory.Popup.__init__(self)
        self.ids.input.text = text      # pre-fill the text field
        self.callback = callback        # called with the edited text on OK
        self.title = title
||||
@ -1,124 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2015 Thomas Voegtlin
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import webbrowser
|
||||
from enum import IntEnum
|
||||
|
||||
from PyQt5.QtGui import QStandardItemModel, QStandardItem
|
||||
from PyQt5.QtCore import Qt, QPersistentModelIndex, QModelIndex
|
||||
from PyQt5.QtWidgets import (QAbstractItemView, QMenu)
|
||||
|
||||
from electrum.i18n import _
|
||||
from electrum.bitcoin import is_address
|
||||
from electrum.util import block_explorer_URL
|
||||
from electrum.plugin import run_hook
|
||||
|
||||
from .util import MyTreeView, import_meta_gui, export_meta_gui
|
||||
|
||||
|
||||
class ContactList(MyTreeView):
    """Qt tree view listing the wallet's contacts (name + address), with a
    context menu for pay-to / edit / delete / import / export actions."""

    class Columns(IntEnum):
        # column order in the view
        NAME = 0
        ADDRESS = 1

    # column header labels, translated
    headers = {
        Columns.NAME: _('Name'),
        Columns.ADDRESS: _('Address'),
    }
    # columns the parent's filter/search matches against
    filter_columns = [Columns.NAME, Columns.ADDRESS]

    def __init__(self, parent):
        # only the name column is user-editable; address is fixed
        super().__init__(parent, self.create_menu,
                         stretch_column=self.Columns.NAME,
                         editable_columns=[self.Columns.NAME])
        self.setModel(QStandardItemModel(self))
        self.setSelectionMode(QAbstractItemView.ExtendedSelection)
        self.setSortingEnabled(True)
        self.update()

    def on_edited(self, idx, user_role, text):
        """Commit an in-place rename of a contact.

        user_role holds the contact key (the address) stored via Qt.UserRole.
        """
        # remove the old entry, then re-add it under the new name
        _type, prior_name = self.parent.contacts.pop(user_role)
        self.parent.set_contact(text, user_role)
        self.update()

    def import_contacts(self):
        """Import contacts from a user-chosen file, then refresh the view."""
        import_meta_gui(self.parent, _('contacts'), self.parent.contacts.import_file, self.update)

    def export_contacts(self):
        """Export contacts to a user-chosen file."""
        export_meta_gui(self.parent, _('contacts'), self.parent.contacts.export_file)

    def create_menu(self, position):
        """Build and show the context menu for the item(s) at *position*."""
        menu = QMenu()
        idx = self.indexAt(position)
        column = idx.column() or self.Columns.NAME
        # collect the contact keys of all selected rows
        selected_keys = []
        for s_idx in self.selected_in_column(self.Columns.NAME):
            sel_key = self.model().itemFromIndex(s_idx).data(Qt.UserRole)
            selected_keys.append(sel_key)
        if not selected_keys or not idx.isValid():
            # empty area clicked: offer list-level actions only
            menu.addAction(_("New contact"), lambda: self.parent.new_contact_dialog())
            menu.addAction(_("Import file"), lambda: self.import_contacts())
            menu.addAction(_("Export file"), lambda: self.export_contacts())
        else:
            # item-level actions for the clicked column / selection
            column_title = self.model().horizontalHeaderItem(column).text()
            column_data = '\n'.join(self.model().itemFromIndex(s_idx).text()
                                    for s_idx in self.selected_in_column(column))
            menu.addAction(_("Copy {}").format(column_title), lambda: self.parent.app.clipboard().setText(column_data))
            if column in self.editable_columns:
                item = self.model().itemFromIndex(idx)
                if item.isEditable():
                    # would not be editable if openalias
                    persistent = QPersistentModelIndex(idx)
                    menu.addAction(_("Edit {}").format(column_title), lambda p=persistent: self.edit(QModelIndex(p)))
            menu.addAction(_("Pay to"), lambda: self.parent.payto_contacts(selected_keys))
            menu.addAction(_("Delete"), lambda: self.parent.delete_contacts(selected_keys))
            # block-explorer links only make sense for plain addresses
            URLs = [block_explorer_URL(self.config, 'addr', key) for key in filter(is_address, selected_keys)]
            if URLs:
                menu.addAction(_("View on block explorer"), lambda: [webbrowser.open(u) for u in URLs])

        run_hook('create_contact_menu', menu, selected_keys)
        menu.exec_(self.viewport().mapToGlobal(position))

    def update(self):
        """Rebuild the model from the wallet's contacts, preserving the
        currently selected item where possible."""
        current_key = self.current_item_user_role(col=self.Columns.NAME)
        self.model().clear()
        self.update_headers(self.__class__.headers)
        set_current = None
        for key in sorted(self.parent.contacts.keys()):
            contact_type, name = self.parent.contacts[key]
            items = [QStandardItem(x) for x in (name, key)]
            # openalias names are resolved externally and must not be edited
            items[self.Columns.NAME].setEditable(contact_type != 'openalias')
            items[self.Columns.ADDRESS].setEditable(False)
            items[self.Columns.NAME].setData(key, Qt.UserRole)
            row_count = self.model().rowCount()
            self.model().insertRow(row_count, items)
            if key == current_key:
                idx = self.model().index(row_count, self.Columns.NAME)
                set_current = QPersistentModelIndex(idx)
        self.set_current_idx(set_current)
        # FIXME refresh loses sort order; so set "default" here:
        self.sortByColumn(self.Columns.NAME, Qt.AscendingOrder)
        run_hook('update_contacts_tab', self)
|
||||
@ -1,701 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2015 Thomas Voegtlin
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import os
|
||||
import webbrowser
|
||||
import datetime
|
||||
from datetime import date
|
||||
from typing import TYPE_CHECKING, Tuple, Dict
|
||||
import threading
|
||||
from enum import IntEnum
|
||||
from decimal import Decimal
|
||||
|
||||
from PyQt5.QtGui import QMouseEvent, QFont, QBrush, QColor
|
||||
from PyQt5.QtCore import (Qt, QPersistentModelIndex, QModelIndex, QAbstractItemModel,
|
||||
QSortFilterProxyModel, QVariant, QItemSelectionModel, QDate, QPoint)
|
||||
from PyQt5.QtWidgets import (QMenu, QHeaderView, QLabel, QMessageBox,
|
||||
QPushButton, QComboBox, QVBoxLayout, QCalendarWidget,
|
||||
QGridLayout)
|
||||
|
||||
from electrum.address_synchronizer import TX_HEIGHT_LOCAL
|
||||
from electrum.i18n import _
|
||||
from electrum.util import (block_explorer_URL, profiler, print_error, TxMinedInfo,
|
||||
OrderedDictWithIndex, PrintError, timestamp_to_datetime)
|
||||
|
||||
from .util import (read_QIcon, MONOSPACE_FONT, Buttons, CancelButton, OkButton,
|
||||
filename_field, MyTreeView, AcceptFileDragDrop, WindowModalDialog,
|
||||
CloseButton)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from electrum.wallet import Abstract_Wallet
|
||||
|
||||
try:
|
||||
from electrum.plot import plot_history, NothingToPlotException
|
||||
except:
|
||||
print_error("qt/history_list: could not import electrum.plot. This feature needs matplotlib to be installed.")
|
||||
plot_history = None
|
||||
|
||||
import pdb
|
||||
|
||||
# note: this list needs to be kept in sync with another in kivy
|
||||
TX_ICONS = [
|
||||
"unconfirmed.png",
|
||||
"warning.png",
|
||||
"unconfirmed.png",
|
||||
"offline_tx.png",
|
||||
"clock1.png",
|
||||
"clock2.png",
|
||||
"clock3.png",
|
||||
"clock4.png",
|
||||
"clock5.png",
|
||||
"confirmed.png",
|
||||
]
|
||||
|
||||
class HistoryColumns(IntEnum):
    """Column indices of the transaction-history view; the numeric order
    defines the on-screen column order."""
    STATUS_ICON = 0
    STATUS_TEXT = 1
    DESCRIPTION = 2
    COIN_VALUE = 3
    RUNNING_COIN_BALANCE = 4
    FIAT_VALUE = 5
    FIAT_ACQ_PRICE = 6
    FIAT_CAP_GAINS = 7
    TXID = 8
    FLO_DATA = 9
|
||||
|
||||
class HistorySortModel(QSortFilterProxyModel):
    """Proxy model that sorts history rows by the sort payload each cell
    stores under Qt.UserRole (see HistoryModel.data)."""

    def lessThan(self, source_left: QModelIndex, source_right: QModelIndex):
        item1 = self.sourceModel().data(source_left, Qt.UserRole)
        item2 = self.sourceModel().data(source_right, Qt.UserRole)
        if item1 is None or item2 is None:
            raise Exception(f'UserRole not set for column {source_left.column()}')
        v1 = item1.value()
        v2 = item2.value()
        # None and NaN never compare meaningfully: sort them to the bottom.
        if v1 is None or isinstance(v1, Decimal) and v1.is_nan(): v1 = -float("inf")
        if v2 is None or isinstance(v2, Decimal) and v2.is_nan(): v2 = -float("inf")
        try:
            return v1 < v2
        except TypeError:
            # Fix: the original used a bare `except:`, which would also hide
            # KeyboardInterrupt and programming errors. Only an uncomparable
            # type mix raises here; treat it as "not less than".
            return False
|
||||
|
||||
class HistoryModel(QAbstractItemModel, PrintError):
    """Flat (non-hierarchical) Qt item model over the wallet's tx history.

    Rows are tx items keyed by txid in an OrderedDictWithIndex; columns are
    defined by HistoryColumns.  The paired view is a HistoryList, attached
    after construction via set_view() (the two mutually depend on each other).
    """

    def __init__(self, parent):
        """*parent* is the main window; it provides wallet, fx and gui_thread."""
        super().__init__(parent)
        self.parent = parent
        self.view = None  # type: HistoryList
        self.transactions = OrderedDictWithIndex()
        # txid -> (status, status_str), rebuilt in refresh()
        self.tx_status_cache = {}  # type: Dict[str, Tuple[int, str]]
        # summary dict from wallet.get_full_history(); shown by show_summary()
        self.summary = None

    def set_view(self, history_list: 'HistoryList'):
        """Attach the view after both objects exist.

        # FIXME HistoryModel and HistoryList mutually depend on each other.
        # After constructing both, this method needs to be called.
        """
        self.view = history_list  # type: HistoryList
        self.set_visibility_of_columns()

    def columnCount(self, parent: QModelIndex):
        """Fixed column count given by the HistoryColumns enum."""
        return len(HistoryColumns)

    def rowCount(self, parent: QModelIndex):
        """One row per transaction."""
        return len(self.transactions)

    def index(self, row: int, column: int, parent: QModelIndex):
        """Flat model: indices never carry an internal pointer."""
        return self.createIndex(row, column)

    def data(self, index: QModelIndex, role: Qt.ItemDataRole) -> QVariant:
        """Return cell data for *index*/*role*.

        UserRole carries the raw sortable value consumed by
        HistorySortModel.lessThan; Display/Edit roles carry formatted text;
        the remaining roles provide icons, alignment, fonts and colors.

        # note: this method is performance-critical.
        # it is called a lot, and so must run extremely fast.
        """
        assert index.isValid()
        col = index.column()
        tx_item = self.transactions.value_from_pos(index.row())
        tx_hash = tx_item['txid']
        conf = tx_item['confirmations']
        txpos = tx_item['txpos_in_block'] or 0
        height = tx_item['height']
        flodata = self.parent.wallet.get_flodata(tx_hash)
        try:
            status, status_str = self.tx_status_cache[tx_hash]
        except KeyError:
            # cache miss (e.g. brand-new tx before refresh()): compute directly
            tx_mined_info = self.tx_mined_info_from_tx_item(tx_item)
            status, status_str = self.parent.wallet.get_tx_status(tx_hash, tx_mined_info)
        if role == Qt.UserRole:
            # for sorting
            d = {
                HistoryColumns.STATUS_ICON:
                    # height breaks ties for unverified txns
                    # txpos breaks ties for verified same block txns
                    (status, conf, -height, -txpos),
                HistoryColumns.STATUS_TEXT: status_str,
                HistoryColumns.DESCRIPTION: tx_item['label'],
                HistoryColumns.COIN_VALUE: tx_item['value'].value,
                HistoryColumns.RUNNING_COIN_BALANCE: tx_item['balance'].value,
                HistoryColumns.FIAT_VALUE:
                    tx_item['fiat_value'].value if 'fiat_value' in tx_item else None,
                HistoryColumns.FIAT_ACQ_PRICE:
                    tx_item['acquisition_price'].value if 'acquisition_price' in tx_item else None,
                HistoryColumns.FIAT_CAP_GAINS:
                    tx_item['capital_gain'].value if 'capital_gain' in tx_item else None,
                HistoryColumns.TXID: tx_hash,
                HistoryColumns.FLO_DATA: flodata,
            }
            return QVariant(d[col])
        if role not in (Qt.DisplayRole, Qt.EditRole):
            # decoration / tooltip / alignment / font / color roles
            if col == HistoryColumns.STATUS_ICON and role == Qt.DecorationRole:
                return QVariant(read_QIcon(TX_ICONS[status]))
            elif col == HistoryColumns.STATUS_ICON and role == Qt.ToolTipRole:
                return QVariant(str(conf) + _(" confirmation" + ("s" if conf != 1 else "")))
            elif col > HistoryColumns.DESCRIPTION and role == Qt.TextAlignmentRole:
                # numeric columns are right-aligned
                return QVariant(Qt.AlignRight | Qt.AlignVCenter)
            elif col != HistoryColumns.STATUS_TEXT and role == Qt.FontRole:
                monospace_font = QFont(MONOSPACE_FONT)
                return QVariant(monospace_font)
            elif col == HistoryColumns.DESCRIPTION and role == Qt.DecorationRole \
                    and self.parent.wallet.invoices.paid.get(tx_hash):
                # seal icon marks txns that paid an invoice
                return QVariant(read_QIcon("seal"))
            elif col in (HistoryColumns.DESCRIPTION, HistoryColumns.COIN_VALUE) \
                    and role == Qt.ForegroundRole and tx_item['value'].value < 0:
                # outgoing amounts are shown in red
                red_brush = QBrush(QColor("#BC1E1E"))
                return QVariant(red_brush)
            elif col == HistoryColumns.FIAT_VALUE and role == Qt.ForegroundRole \
                    and not tx_item.get('fiat_default') and tx_item.get('fiat_value') is not None:
                # user-overridden fiat values are shown in blue
                blue_brush = QBrush(QColor("#1E1EFF"))
                return QVariant(blue_brush)
            return QVariant()
        # Display/Edit roles: formatted text
        if col == HistoryColumns.STATUS_TEXT:
            return QVariant(status_str)
        elif col == HistoryColumns.DESCRIPTION:
            return QVariant(tx_item['label'])
        elif col == HistoryColumns.COIN_VALUE:
            value = tx_item['value'].value
            v_str = self.parent.format_amount(value, is_diff=True, whitespaces=True)
            return QVariant(v_str)
        elif col == HistoryColumns.RUNNING_COIN_BALANCE:
            balance = tx_item['balance'].value
            balance_str = self.parent.format_amount(balance, whitespaces=True)
            return QVariant(balance_str)
        elif col == HistoryColumns.FIAT_VALUE and 'fiat_value' in tx_item:
            value_str = self.parent.fx.format_fiat(tx_item['fiat_value'].value)
            return QVariant(value_str)
        elif col == HistoryColumns.FIAT_ACQ_PRICE and \
                tx_item['value'].value < 0 and 'acquisition_price' in tx_item:
            # fixme: should use is_mine
            acq = tx_item['acquisition_price'].value
            return QVariant(self.parent.fx.format_fiat(acq))
        elif col == HistoryColumns.FIAT_CAP_GAINS and 'capital_gain' in tx_item:
            cg = tx_item['capital_gain'].value
            return QVariant(self.parent.fx.format_fiat(cg))
        elif col == HistoryColumns.TXID:
            return QVariant(tx_hash)
        elif col == HistoryColumns.FLO_DATA:
            return QVariant(flodata)
        return QVariant()

    def parent(self, index: QModelIndex):
        """Flat model: no item has a parent."""
        return QModelIndex()

    def hasChildren(self, index: QModelIndex):
        """Only the (invalid) root index has children."""
        return not index.isValid()

    def update_label(self, row):
        """Re-read the wallet label for the tx at *row* and notify the view."""
        tx_item = self.transactions.value_from_pos(row)
        tx_item['label'] = self.parent.wallet.get_label(tx_item['txid'])
        # fix: use the enum instead of the magic number 2 so the notification
        # stays correct if column order ever changes
        topLeft = bottomRight = self.createIndex(row, HistoryColumns.DESCRIPTION)
        self.dataChanged.emit(topLeft, bottomRight, [Qt.DisplayRole])

    def get_domain(self):
        '''Overridden in address_dialog.py'''
        return self.parent.wallet.get_addresses()

    @profiler
    def refresh(self, reason: str):
        """Rebuild the model from the wallet's full history.

        Must run on the GUI thread (Qt models are not thread-safe).  Skips
        the expensive reset when the history is unchanged; otherwise removes
        all rows, re-inserts, restores the selection, re-applies the current
        filter, and rebuilds the tx-status cache and the year combo.
        """
        self.print_error(f"refreshing... reason: {reason}")
        assert self.parent.gui_thread == threading.current_thread(), 'must be called from GUI thread'
        assert self.view, 'view not set'
        selected = self.view.selectionModel().currentIndex()
        selected_row = None
        # fix: QModelIndex has no __bool__, so `if selected:` was always true;
        # test validity explicitly
        if selected.isValid():
            selected_row = selected.row()
        fx = self.parent.fx
        if fx: fx.history_used_spot = False
        r = self.parent.wallet.get_full_history(domain=self.get_domain(), from_timestamp=None, to_timestamp=None, fx=fx)
        self.set_visibility_of_columns()
        if r['transactions'] == list(self.transactions.values()):
            return
        old_length = len(self.transactions)
        if old_length != 0:
            # fix: beginRemoveRows' `last` argument is inclusive, so the last
            # valid row is old_length - 1 (was off by one)
            self.beginRemoveRows(QModelIndex(), 0, old_length - 1)
            self.transactions.clear()
            self.endRemoveRows()
        if r['transactions']:
            # fix: guard against an empty history; beginInsertRows(.., 0, -1)
            # is invalid per the Qt contract
            self.beginInsertRows(QModelIndex(), 0, len(r['transactions'])-1)
            for tx_item in r['transactions']:
                txid = tx_item['txid']
                self.transactions[txid] = tx_item
            self.endInsertRows()
        # fix: `if selected_row:` dropped the selection when row 0 was selected
        if selected_row is not None:
            self.view.selectionModel().select(self.createIndex(selected_row, 0), QItemSelectionModel.Rows | QItemSelectionModel.SelectCurrent)
        f = self.view.current_filter
        if f:
            self.view.filter(f)
        # update summary
        self.summary = r['summary']
        if not self.view.years and self.transactions:
            start_date = date.today()
            end_date = date.today()
            if len(self.transactions) > 0:
                start_date = self.transactions.value_from_pos(0).get('date') or start_date
                end_date = self.transactions.value_from_pos(len(self.transactions) - 1).get('date') or end_date
            self.view.years = [str(i) for i in range(start_date.year, end_date.year + 1)]
            self.view.period_combo.insertItems(1, self.view.years)
        # update tx_status_cache
        self.tx_status_cache.clear()
        for txid, tx_item in self.transactions.items():
            tx_mined_info = self.tx_mined_info_from_tx_item(tx_item)
            self.tx_status_cache[txid] = self.parent.wallet.get_tx_status(txid, tx_mined_info)

    def set_visibility_of_columns(self):
        """Show/hide columns according to fiat-history config; TXID stays hidden."""
        def set_visible(col: int, b: bool):
            self.view.showColumn(col) if b else self.view.hideColumn(col)
        # txid
        set_visible(HistoryColumns.TXID, False)
        # fiat
        history = self.parent.fx.show_history()
        cap_gains = self.parent.fx.get_history_capital_gains_config()
        set_visible(HistoryColumns.FIAT_VALUE, history)
        set_visible(HistoryColumns.FIAT_ACQ_PRICE, history and cap_gains)
        set_visible(HistoryColumns.FIAT_CAP_GAINS, history and cap_gains)

    def update_fiat(self, row, idx):
        """Recompute the fiat fields of the tx at *row* and notify at *idx*."""
        tx_item = self.transactions.value_from_pos(row)
        key = tx_item['txid']
        fee = tx_item.get('fee')
        value = tx_item['value'].value
        fiat_fields = self.parent.wallet.get_tx_item_fiat(key, value, self.parent.fx, fee.value if fee else None)
        tx_item.update(fiat_fields)
        self.dataChanged.emit(idx, idx, [Qt.DisplayRole, Qt.ForegroundRole])

    def update_tx_mined_status(self, tx_hash: str, tx_mined_info: TxMinedInfo):
        """Update confirmation/mining info for one tx; no-op if unknown."""
        try:
            row = self.transactions.pos_from_key(tx_hash)
            tx_item = self.transactions[tx_hash]
        except KeyError:
            return
        self.tx_status_cache[tx_hash] = self.parent.wallet.get_tx_status(tx_hash, tx_mined_info)
        tx_item.update({
            'confirmations': tx_mined_info.conf,
            'timestamp': tx_mined_info.timestamp,
            'txpos_in_block': tx_mined_info.txpos,
            'date': timestamp_to_datetime(tx_mined_info.timestamp),
        })
        # status can affect every column (icon, text, sort key), so notify all
        topLeft = self.createIndex(row, 0)
        bottomRight = self.createIndex(row, len(HistoryColumns) - 1)
        self.dataChanged.emit(topLeft, bottomRight)

    def on_fee_histogram(self):
        """Refresh the status of all unconfirmed txns (fee estimates changed)."""
        for tx_hash, tx_item in list(self.transactions.items()):
            tx_mined_info = self.tx_mined_info_from_tx_item(tx_item)
            if tx_mined_info.conf > 0:
                # note: we could actually break here if we wanted to rely on the order of txns in self.transactions
                continue
            self.update_tx_mined_status(tx_hash, tx_mined_info)

    def headerData(self, section: int, orientation: Qt.Orientation, role: Qt.ItemDataRole):
        """Column titles; fiat titles include the active fiat currency code."""
        assert orientation == Qt.Horizontal
        if role != Qt.DisplayRole:
            return None
        fx = self.parent.fx
        fiat_title = 'n/a fiat value'
        fiat_acq_title = 'n/a fiat acquisition price'
        fiat_cg_title = 'n/a fiat capital gains'
        if fx and fx.show_history():
            fiat_title = '%s '%fx.ccy + _('Value')
            fiat_acq_title = '%s '%fx.ccy + _('Acquisition price')
            fiat_cg_title = '%s '%fx.ccy + _('Capital Gains')
        return {
            HistoryColumns.STATUS_ICON: '',
            HistoryColumns.STATUS_TEXT: _('Date'),
            HistoryColumns.DESCRIPTION: _('Description'),
            HistoryColumns.COIN_VALUE: _('Amount'),
            HistoryColumns.RUNNING_COIN_BALANCE: _('Balance'),
            HistoryColumns.FIAT_VALUE: fiat_title,
            HistoryColumns.FIAT_ACQ_PRICE: fiat_acq_title,
            HistoryColumns.FIAT_CAP_GAINS: fiat_cg_title,
            HistoryColumns.TXID: 'TXID',
            HistoryColumns.FLO_DATA: _('FLO Data'),
        }[section]

    def flags(self, idx):
        """Base flags plus ItemIsEditable for the view's editable columns."""
        extra_flags = Qt.NoItemFlags  # type: Qt.ItemFlag
        if idx.column() in self.view.editable_columns:
            extra_flags |= Qt.ItemIsEditable
        return super().flags(idx) | extra_flags

    @staticmethod
    def tx_mined_info_from_tx_item(tx_item):
        """Build a TxMinedInfo from the height/conf/timestamp of a tx dict."""
        tx_mined_info = TxMinedInfo(height=tx_item['height'],
                                    conf=tx_item['confirmations'],
                                    timestamp=tx_item['timestamp'])
        return tx_mined_info
|
||||
|
||||
class HistoryList(MyTreeView, AcceptFileDragDrop):
    """Tree view over HistoryModel (via HistorySortModel).

    Provides the period toolbar (year/custom date range), the context menu,
    label/fiat editing, CSV/JSON export, summary and plot dialogs, and
    drag-and-drop import of .txn files.
    """
    # columns matched by the text filter box
    filter_columns = [HistoryColumns.STATUS_TEXT,
                      HistoryColumns.DESCRIPTION,
                      HistoryColumns.COIN_VALUE,
                      HistoryColumns.TXID]

    def tx_item_from_proxy_row(self, proxy_row):
        """Map a proxy (sorted/filtered) row back to its tx dict."""
        hm_idx = self.model().mapToSource(self.model().index(proxy_row, 0))
        return self.hm.transactions.value_from_pos(hm_idx.row())

    def should_hide(self, proxy_row):
        """Hide rows whose date falls outside the selected period, if any."""
        if self.start_timestamp and self.end_timestamp:
            tx_item = self.tx_item_from_proxy_row(proxy_row)
            date = tx_item['date']
            if date:
                in_interval = self.start_timestamp <= date <= self.end_timestamp
                if not in_interval:
                    return True
        return False

    def __init__(self, parent, model: HistoryModel):
        """*parent* is the main window; *model* the (already built) HistoryModel."""
        super().__init__(parent, self.create_menu, stretch_column=HistoryColumns.DESCRIPTION)
        self.hm = model
        self.proxy = HistorySortModel(self)
        self.proxy.setSourceModel(model)
        self.setModel(self.proxy)

        self.config = parent.config
        AcceptFileDragDrop.__init__(self, ".txn")
        self.setSortingEnabled(True)
        # period filter state (None = unbounded)
        self.start_timestamp = None
        self.end_timestamp = None
        self.years = []
        self.create_toolbar_buttons()
        self.wallet = self.parent.wallet  # type: Abstract_Wallet
        self.sortByColumn(HistoryColumns.STATUS_ICON, Qt.AscendingOrder)
        self.editable_columns |= {HistoryColumns.FIAT_VALUE}

        self.header().setStretchLastSection(False)
        for col in HistoryColumns:
            sm = QHeaderView.Stretch if col == self.stretch_column else QHeaderView.ResizeToContents
            self.header().setSectionResizeMode(col, sm)

    def format_date(self, d):
        """ISO date string for *d*, or the localized 'None'."""
        return str(datetime.date(d.year, d.month, d.day)) if d else _('None')

    def on_combo(self, x):
        """Handle period-combo activation: 'All', 'Custom' or a year."""
        s = self.period_combo.itemText(x)
        x = s == _('Custom')
        self.start_button.setEnabled(x)
        self.end_button.setEnabled(x)
        if s == _('All'):
            self.start_timestamp = None
            self.end_timestamp = None
            self.start_button.setText("-")
            self.end_button.setText("-")
        else:
            try:
                year = int(s)
            except ValueError:
                # 'Custom' selected: keep whatever the date buttons hold
                return
            self.start_timestamp = start_date = datetime.datetime(year, 1, 1)
            self.end_timestamp = end_date = datetime.datetime(year+1, 1, 1)
            self.start_button.setText(_('From') + ' ' + self.format_date(start_date))
            self.end_button.setText(_('To') + ' ' + self.format_date(end_date))
        self.hide_rows()

    def create_toolbar_buttons(self):
        """Build the period combo and the start/end date buttons."""
        self.period_combo = QComboBox()
        self.start_button = QPushButton('-')
        self.start_button.pressed.connect(self.select_start_date)
        self.start_button.setEnabled(False)
        self.end_button = QPushButton('-')
        self.end_button.pressed.connect(self.select_end_date)
        self.end_button.setEnabled(False)
        self.period_combo.addItems([_('All'), _('Custom')])
        self.period_combo.activated.connect(self.on_combo)

    def get_toolbar_buttons(self):
        """Widgets MyTreeView places in the toolbar."""
        return self.period_combo, self.start_button, self.end_button

    def on_hide_toolbar(self):
        """Reset the period filter when the toolbar is hidden."""
        self.start_timestamp = None
        self.end_timestamp = None
        self.hide_rows()

    def save_toolbar_state(self, state, config):
        """Persist toolbar visibility."""
        config.set_key('show_toolbar_history', state)

    def select_start_date(self):
        """Pick the period start via calendar dialog, then re-filter."""
        self.start_timestamp = self.select_date(self.start_button)
        self.hide_rows()

    def select_end_date(self):
        """Pick the period end via calendar dialog, then re-filter."""
        self.end_timestamp = self.select_date(self.end_button)
        self.hide_rows()

    def select_date(self, button):
        """Modal calendar dialog; returns the chosen datetime or None."""
        d = WindowModalDialog(self, _("Select date"))
        d.setMinimumSize(600, 150)
        d.date = None
        vbox = QVBoxLayout()
        def on_date(date):
            d.date = date
        cal = QCalendarWidget()
        cal.setGridVisible(True)
        cal.clicked[QDate].connect(on_date)
        vbox.addWidget(cal)
        vbox.addLayout(Buttons(OkButton(d), CancelButton(d)))
        d.setLayout(vbox)
        if d.exec_():
            if d.date is None:
                return None
            date = d.date.toPyDate()
            button.setText(self.format_date(date))
            return datetime.datetime(date.year, date.month, date.day)
        # dialog cancelled
        return None

    def show_summary(self):
        """Modal dialog with the history summary computed by the wallet."""
        h = self.model().sourceModel().summary
        if not h:
            self.parent.show_message(_("Nothing to summarize."))
            return
        start_date = h.get('start_date')
        end_date = h.get('end_date')
        format_amount = lambda x: self.parent.format_amount(x.value) + ' ' + self.parent.base_unit()
        d = WindowModalDialog(self, _("Summary"))
        d.setMinimumSize(600, 150)
        vbox = QVBoxLayout()
        grid = QGridLayout()
        grid.addWidget(QLabel(_("Start")), 0, 0)
        grid.addWidget(QLabel(self.format_date(start_date)), 0, 1)
        grid.addWidget(QLabel(str(h.get('start_fiat_value')) + '/FLO'), 0, 2)
        grid.addWidget(QLabel(_("Initial balance")), 1, 0)
        grid.addWidget(QLabel(format_amount(h['start_balance'])), 1, 1)
        grid.addWidget(QLabel(str(h.get('start_fiat_balance'))), 1, 2)
        grid.addWidget(QLabel(_("End")), 2, 0)
        grid.addWidget(QLabel(self.format_date(end_date)), 2, 1)
        grid.addWidget(QLabel(str(h.get('end_fiat_value')) + '/FLO'), 2, 2)
        # note: grid row 3 is intentionally left empty as a separator
        grid.addWidget(QLabel(_("Final balance")), 4, 0)
        grid.addWidget(QLabel(format_amount(h['end_balance'])), 4, 1)
        grid.addWidget(QLabel(str(h.get('end_fiat_balance'))), 4, 2)
        grid.addWidget(QLabel(_("Income")), 5, 0)
        grid.addWidget(QLabel(format_amount(h.get('income'))), 5, 1)
        grid.addWidget(QLabel(str(h.get('fiat_income'))), 5, 2)
        grid.addWidget(QLabel(_("Expenditures")), 6, 0)
        grid.addWidget(QLabel(format_amount(h.get('expenditures'))), 6, 1)
        grid.addWidget(QLabel(str(h.get('fiat_expenditures'))), 6, 2)
        grid.addWidget(QLabel(_("Capital gains")), 7, 0)
        grid.addWidget(QLabel(str(h.get('capital_gains'))), 7, 2)
        grid.addWidget(QLabel(_("Unrealized gains")), 8, 0)
        grid.addWidget(QLabel(str(h.get('unrealized_gains', ''))), 8, 2)
        vbox.addLayout(grid)
        vbox.addLayout(Buttons(CloseButton(d)))
        d.setLayout(vbox)
        d.exec_()

    def plot_history_dialog(self):
        """Plot the history with matplotlib, if available."""
        if plot_history is None:
            self.parent.show_message(
                _("Can't plot history.") + '\n' +
                _("Perhaps some dependencies are missing...") + " (matplotlib?)")
            return
        try:
            plt = plot_history(list(self.hm.transactions.values()))
            plt.show()
        except NothingToPlotException as e:
            self.parent.show_message(str(e))

    def on_edited(self, index, user_role, text):
        """Commit an inline edit (description label or fiat value)."""
        index = self.model().mapToSource(index)
        row, column = index.row(), index.column()
        tx_item = self.hm.transactions.value_from_pos(row)
        key = tx_item['txid']
        if column == HistoryColumns.DESCRIPTION:
            if self.wallet.set_label(key, text): #changed
                self.hm.update_label(row)
                self.parent.update_completions()
        elif column == HistoryColumns.FIAT_VALUE:
            self.wallet.set_fiat_value(key, self.parent.fx.ccy, text, self.parent.fx, tx_item['value'].value)
            value = tx_item['value'].value
            if value is not None:
                self.hm.update_fiat(row, index)
        else:
            assert False

    def mouseDoubleClickEvent(self, event: QMouseEvent):
        """Double-click: edit if the cell is editable, else open the tx dialog."""
        idx = self.indexAt(event.pos())
        if not idx.isValid():
            return
        tx_item = self.tx_item_from_proxy_row(idx.row())
        if self.hm.flags(self.model().mapToSource(idx)) & Qt.ItemIsEditable:
            super().mouseDoubleClickEvent(event)
        else:
            self.show_transaction(tx_item['txid'])

    def show_transaction(self, tx_hash):
        """Open the transaction-details dialog for *tx_hash*."""
        tx = self.wallet.transactions.get(tx_hash)
        if not tx:
            return
        label = self.wallet.get_label(tx_hash) or None  # prefer 'None' if not defined (force tx dialog to hide Description field if missing)
        self.parent.show_transaction(tx, label)

    def create_menu(self, position: QPoint):
        """Build and exec the per-row context menu at *position*."""
        org_idx: QModelIndex = self.indexAt(position)
        idx = self.proxy.mapToSource(org_idx)
        if not idx.isValid():
            # can happen e.g. before list is populated for the first time
            return
        tx_item = self.hm.transactions.value_from_pos(idx.row())
        column = idx.column()
        if column == HistoryColumns.STATUS_ICON:
            column_title = _('Transaction ID')
            column_data = tx_item['txid']
        else:
            column_title = self.hm.headerData(column, Qt.Horizontal, Qt.DisplayRole)
            column_data = self.hm.data(idx, Qt.DisplayRole).value()
        tx_hash = tx_item['txid']
        tx = self.wallet.transactions[tx_hash]
        tx_URL = block_explorer_URL(self.config, 'tx', tx_hash)
        height = self.wallet.get_tx_height(tx_hash).height
        is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
        is_unconfirmed = height <= 0
        pr_key = self.wallet.invoices.paid.get(tx_hash)
        menu = QMenu()
        if height == TX_HEIGHT_LOCAL:
            menu.addAction(_("Remove"), lambda: self.remove_local_tx(tx_hash))
        menu.addAction(_("Copy {}").format(column_title), lambda: self.parent.app.clipboard().setText(column_data))
        for c in self.editable_columns:
            if self.isColumnHidden(c): continue
            label = self.hm.headerData(c, Qt.Horizontal, Qt.DisplayRole)
            # TODO use siblingAtColumn when min Qt version is >=5.11
            persistent = QPersistentModelIndex(org_idx.sibling(org_idx.row(), c))
            menu.addAction(_("Edit {}").format(label), lambda p=persistent: self.edit(QModelIndex(p)))
        menu.addAction(_("Details"), lambda: self.show_transaction(tx_hash))
        if is_unconfirmed and tx:
            # note: the current implementation of RBF *needs* the old tx fee
            rbf = is_mine and not tx.is_final() and fee is not None
            if rbf:
                menu.addAction(_("Increase fee"), lambda: self.parent.bump_fee_dialog(tx))
            else:
                child_tx = self.wallet.cpfp(tx, 0)
                if child_tx:
                    menu.addAction(_("Child pays for parent"), lambda: self.parent.cpfp(tx, child_tx))
        if pr_key:
            menu.addAction(read_QIcon("seal"), _("View invoice"), lambda: self.parent.show_invoice(pr_key))
        if tx_URL:
            menu.addAction(_("View on block explorer"), lambda: webbrowser.open(tx_URL))
        menu.exec_(self.viewport().mapToGlobal(position))

    def remove_local_tx(self, delete_tx):
        """Remove a local (unbroadcast) tx and everything that spends it."""
        to_delete = {delete_tx}
        to_delete |= self.wallet.get_depending_transactions(delete_tx)
        question = _("Are you sure you want to remove this transaction?")
        if len(to_delete) > 1:
            # fix: format() must be applied to the *translated* template;
            # formatting first produces a string that never matches the
            # gettext catalog, so the message was untranslatable
            question = _(
                "Are you sure you want to remove this transaction and {} child transactions?"
            ).format(len(to_delete) - 1)
        answer = QMessageBox.question(self.parent, _("Please confirm"), question, QMessageBox.Yes, QMessageBox.No)
        if answer == QMessageBox.No:
            return
        for tx in to_delete:
            self.wallet.remove_transaction(tx)
        self.wallet.save_transactions(write=True)
        # need to update at least: history_list, utxo_list, address_list
        self.parent.need_update.set()

    def onFileAdded(self, fn):
        """Drag-and-drop handler: import a .txn file into the wallet."""
        try:
            with open(fn) as f:
                tx = self.parent.tx_from_text(f.read())
                self.parent.save_transaction_into_wallet(tx)
        except IOError as e:
            self.parent.show_error(e)

    def export_history_dialog(self):
        """Ask for a filename/format and export the history (CSV or JSON)."""
        d = WindowModalDialog(self, _('Export History'))
        d.setMinimumSize(400, 200)
        vbox = QVBoxLayout(d)
        defaultname = os.path.expanduser('~/electrum-history.csv')
        select_msg = _('Select file to export your wallet transactions to')
        hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
        vbox.addLayout(hbox)
        vbox.addStretch(1)
        hbox = Buttons(CancelButton(d), OkButton(d, _('Export')))
        vbox.addLayout(hbox)
        #run_hook('export_history_dialog', self, hbox)
        self.update()
        if not d.exec_():
            return
        filename = filename_e.text()
        if not filename:
            return
        try:
            self.do_export_history(filename, csv_button.isChecked())
        except (IOError, os.error) as reason:
            export_error_label = _("Electrum was unable to produce a transaction export.")
            self.parent.show_critical(export_error_label + "\n" + str(reason), title=_("Unable to export history"))
            return
        self.parent.show_message(_("Your wallet history has been successfully exported."))

    def do_export_history(self, file_name, is_csv):
        """Write the full history to *file_name* as CSV or JSON."""
        hist = self.wallet.get_full_history(domain=self.hm.get_domain(),
                                            from_timestamp=None,
                                            to_timestamp=None,
                                            fx=self.parent.fx,
                                            show_fees=True)
        txns = hist['transactions']
        lines = []
        if is_csv:
            for item in txns:
                lines.append([item['txid'],
                              item.get('label', ''),
                              item['confirmations'],
                              item['value'],
                              item.get('fiat_value', ''),
                              item.get('fee', ''),
                              item.get('fiat_fee', ''),
                              item['date']])
        with open(file_name, "w+", encoding='utf-8') as f:
            if is_csv:
                import csv
                transaction = csv.writer(f, lineterminator='\n')
                transaction.writerow(["transaction_hash",
                                      "label",
                                      "confirmations",
                                      "value",
                                      "fiat_value",
                                      "fee",
                                      "fiat_fee",
                                      "timestamp"])
                for line in lines:
                    transaction.writerow(line)
            else:
                from electrum.util import json_encode
                f.write(json_encode(txns))

    def text_txid_from_coordinate(self, row, col):
        """Return (display text, txid) for the proxy cell at (row, col)."""
        idx = self.model().mapToSource(self.model().index(row, col))
        tx_item = self.hm.transactions.value_from_pos(idx.row())
        return self.hm.data(idx, Qt.DisplayRole).value(), tx_item['txid']
|
||||
@ -1,115 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2015 Thomas Voegtlin
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
from enum import IntEnum
|
||||
|
||||
from PyQt5.QtCore import Qt, QItemSelectionModel
|
||||
from PyQt5.QtGui import QStandardItemModel, QStandardItem, QFont
|
||||
from PyQt5.QtWidgets import QHeaderView, QMenu
|
||||
|
||||
from electrum.i18n import _
|
||||
from electrum.util import format_time
|
||||
|
||||
from .util import (MyTreeView, read_QIcon, MONOSPACE_FONT, PR_UNPAID,
|
||||
pr_tooltips, import_meta_gui, export_meta_gui, pr_icons)
|
||||
|
||||
|
||||
class InvoiceList(MyTreeView):
    """List view of unpaid payment-request invoices.

    Backed by a QStandardItemModel that is rebuilt from scratch on every
    update(); each row stores the invoice key under Qt.UserRole on its
    DATE item.
    """

    class Columns(IntEnum):
        # column indices of the invoice table
        DATE = 0
        REQUESTOR = 1
        DESCRIPTION = 2
        AMOUNT = 3
        STATUS = 4

    headers = {
        Columns.DATE: _('Expires'),
        Columns.REQUESTOR: _('Requestor'),
        Columns.DESCRIPTION: _('Description'),
        Columns.AMOUNT: _('Amount'),
        Columns.STATUS: _('Status'),
    }
    # columns matched by the text filter box
    filter_columns = [Columns.DATE, Columns.REQUESTOR, Columns.DESCRIPTION, Columns.AMOUNT]

    def __init__(self, parent):
        """*parent* is the main window (provides invoices store and formatting)."""
        super().__init__(parent, self.create_menu,
                         stretch_column=self.Columns.DESCRIPTION,
                         editable_columns=[])
        self.setSortingEnabled(True)
        self.setColumnWidth(self.Columns.REQUESTOR, 200)
        self.setModel(QStandardItemModel(self))
        self.update()

    def update(self):
        """Rebuild the model from the current unpaid invoices.

        Also toggles the widget's (and its label's) visibility depending on
        whether there is anything to show.
        """
        inv_list = self.parent.invoices.unpaid_invoices()
        self.model().clear()
        self.update_headers(self.__class__.headers)
        self.header().setSectionResizeMode(self.Columns.REQUESTOR, QHeaderView.Interactive)
        for idx, pr in enumerate(inv_list):
            key = pr.get_id()
            status = self.parent.invoices.get_status(key)
            requestor = pr.get_requestor()
            exp = pr.get_expiration_date()
            date_str = format_time(exp) if exp else _('Never')
            labels = [date_str, requestor, pr.memo, self.parent.format_amount(pr.get_amount(), whitespaces=True), pr_tooltips.get(status,'')]
            items = [QStandardItem(e) for e in labels]
            self.set_editability(items)
            items[self.Columns.STATUS].setIcon(read_QIcon(pr_icons.get(status)))
            # the invoice key rides on the DATE item for later lookup
            items[self.Columns.DATE].setData(key, role=Qt.UserRole)
            items[self.Columns.REQUESTOR].setFont(QFont(MONOSPACE_FONT))
            items[self.Columns.AMOUNT].setFont(QFont(MONOSPACE_FONT))
            self.model().insertRow(idx, items)
        self.selectionModel().select(self.model().index(0,0), QItemSelectionModel.SelectCurrent)
        if self.parent.isVisible():
            # hide the whole section when there are no unpaid invoices
            b = len(inv_list) > 0
            self.setVisible(b)
            self.parent.invoices_label.setVisible(b)

    def import_invoices(self):
        """Import invoices from a user-chosen file, then refresh."""
        import_meta_gui(self.parent, _('invoices'), self.parent.invoices.import_file, self.update)

    def export_invoices(self):
        """Export invoices to a user-chosen file."""
        export_meta_gui(self.parent, _('invoices'), self.parent.invoices.export_file)

    def create_menu(self, position):
        """Build and exec the per-row context menu at *position*."""
        idx = self.indexAt(position)
        item = self.model().itemFromIndex(idx)
        # the invoice key lives on the row's DATE item (see update())
        item_col0 = self.model().itemFromIndex(idx.sibling(idx.row(), self.Columns.DATE))
        if not item or not item_col0:
            return
        key = item_col0.data(Qt.UserRole)
        column = idx.column()
        column_title = self.model().horizontalHeaderItem(column).text()
        column_data = item.text()
        status = self.parent.invoices.get_status(key)
        menu = QMenu(self)
        if column_data:
            menu.addAction(_("Copy {}").format(column_title), lambda: self.parent.app.clipboard().setText(column_data))
        menu.addAction(_("Details"), lambda: self.parent.show_invoice(key))
        if status == PR_UNPAID:
            menu.addAction(_("Pay Now"), lambda: self.parent.do_pay_invoice(key))
        menu.addAction(_("Delete"), lambda: self.parent.delete_invoice(key))
        menu.exec_(self.viewport().mapToGlobal(position))
|
||||
@ -1,141 +0,0 @@
|
||||
# Copyright (C) 2019 The Electrum developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file LICENCE or http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import asyncio
|
||||
import base64
|
||||
from distutils.version import StrictVersion
|
||||
|
||||
from PyQt5.QtCore import Qt, QThread, pyqtSignal
|
||||
from PyQt5.QtWidgets import (QWidget, QVBoxLayout, QLabel, QProgressBar,
|
||||
QHBoxLayout, QPushButton)
|
||||
|
||||
from electrum import version
|
||||
from electrum import constants
|
||||
from electrum import ecc
|
||||
from electrum.i18n import _
|
||||
from electrum.util import PrintError, make_aiohttp_session
|
||||
|
||||
|
||||
class UpdateCheck(QWidget, PrintError):
    """Small window that checks electrum.org for a newer release.

    A background UpdateCheckThread fetches the latest version; the window
    shows a busy progress bar until the result (or failure) arrives.
    """
    url = "https://electrum.org/version"
    download_url = "https://electrum.org/#download"

    # addresses whose signatures authenticate the version announcement
    VERSION_ANNOUNCEMENT_SIGNING_KEYS = (
        "13xjmVAB1EATPP8RshTE8S8sNwwSUM9p1P",
    )

    def __init__(self, main_window, latest_version=None):
        """*main_window* supplies the network proxy; *latest_version* may be
        pre-seeded to skip the initial 'checking...' state display."""
        self.main_window = main_window
        QWidget.__init__(self)
        self.setWindowTitle('Electrum - ' + _('Update Check'))
        self.content = QVBoxLayout()
        self.content.setContentsMargins(*[10]*4)

        self.heading_label = QLabel()
        self.content.addWidget(self.heading_label)

        self.detail_label = QLabel()
        self.detail_label.setTextInteractionFlags(Qt.LinksAccessibleByMouse)
        self.detail_label.setOpenExternalLinks(True)
        self.content.addWidget(self.detail_label)

        # min == max == 0 puts the bar in 'busy' (indeterminate) mode
        self.pb = QProgressBar()
        self.pb.setMaximum(0)
        self.pb.setMinimum(0)
        self.content.addWidget(self.pb)

        versions = QHBoxLayout()
        # fix: format() must be applied to the *translated* template —
        # translating the already-formatted string never matches the
        # gettext catalog (same pattern as the download_url label below)
        versions.addWidget(QLabel(_("Current version: {}").format(version.ELECTRUM_VERSION)))
        self.latest_version_label = QLabel(_("Latest version: {}").format(" "))
        versions.addWidget(self.latest_version_label)
        self.content.addLayout(versions)

        self.update_view(latest_version)

        self.update_check_thread = UpdateCheckThread(self.main_window)
        self.update_check_thread.checked.connect(self.on_version_retrieved)
        self.update_check_thread.failed.connect(self.on_retrieval_failed)
        self.update_check_thread.start()

        close_button = QPushButton(_("Close"))
        close_button.clicked.connect(self.close)
        self.content.addWidget(close_button)
        self.setLayout(self.content)
        self.show()

    def on_version_retrieved(self, version):
        """Slot for UpdateCheckThread.checked: show the fetched version."""
        self.update_view(version)

    def on_retrieval_failed(self):
        """Slot for UpdateCheckThread.failed: show an error state."""
        self.heading_label.setText('<h2>' + _("Update check failed") + '</h2>')
        self.detail_label.setText(_("Sorry, but we were unable to check for updates. Please try again later."))
        self.pb.hide()

    @staticmethod
    def is_newer(latest_version):
        """True iff *latest_version* (StrictVersion) is newer than ours."""
        return latest_version > StrictVersion(version.ELECTRUM_VERSION)

    def update_view(self, latest_version=None):
        """Render 'checking', 'update available' or 'up to date' states."""
        if latest_version:
            self.pb.hide()
            # fix: translate the template, then format (see __init__)
            self.latest_version_label.setText(_("Latest version: {}").format(latest_version))
            if self.is_newer(latest_version):
                self.heading_label.setText('<h2>' + _("There is a new update available") + '</h2>')
                url = "<a href='{u}'>{u}</a>".format(u=UpdateCheck.download_url)
                self.detail_label.setText(_("You can download the new version from {}.").format(url))
            else:
                self.heading_label.setText('<h2>' + _("Already up to date") + '</h2>')
                self.detail_label.setText(_("You are already on the latest version of Electrum."))
        else:
            self.heading_label.setText('<h2>' + _("Checking for updates...") + '</h2>')
            self.detail_label.setText(_("Please wait while Electrum checks for available updates."))
|
||||
|
||||
|
||||
class UpdateCheckThread(QThread, PrintError):
    """Background thread that fetches and cryptographically verifies the
    latest-version announcement from UpdateCheck.url.

    Emits `checked(StrictVersion)` on success, `failed()` on any error.
    """
    checked = pyqtSignal(object)
    failed = pyqtSignal()

    def __init__(self, main_window):
        super().__init__()
        self.main_window = main_window

    async def get_update_info(self):
        """Download the signed version dict and return the announced version.

        Raises if the HTTP request fails, the JSON is malformed, or no
        signature from a trusted key verifies.
        """
        async with make_aiohttp_session(proxy=self.main_window.network.proxy) as session:
            async with session.get(UpdateCheck.url) as result:
                signed_version_dict = await result.json(content_type=None)
                # example signed_version_dict:
                # {
                #     "version": "3.9.9",
                #     "signatures": {
                #         "1Lqm1HphuhxKZQEawzPse8gJtgjm9kUKT4": "IA+2QG3xPRn4HAIFdpu9eeaCYC7S5wS/sDxn54LJx6BdUTBpse3ibtfq8C43M7M1VfpGkD5tsdwl5C6IfpZD/gQ="
                #     }
                # }
                version_num = signed_version_dict['version']
                sigs = signed_version_dict['signatures']
                # for/else: the else-branch runs only if NO trusted address
                # produced a valid signature over the version string
                for address, sig in sigs.items():
                    if address not in UpdateCheck.VERSION_ANNOUNCEMENT_SIGNING_KEYS:
                        continue
                    sig = base64.b64decode(sig)
                    msg = version_num.encode('utf-8')
                    if ecc.verify_message_with_address(address=address, sig65=sig, message=msg,
                                                      net=constants.BitcoinMainnet):
                        self.print_error(f"valid sig for version announcement '{version_num}' from address '{address}'")
                        break
                else:
                    raise Exception('no valid signature for version announcement')
                return StrictVersion(version_num.strip())

    def run(self):
        """QThread entry point: schedule the coroutine on the network's event
        loop and block (in this thread) for its result."""
        network = self.main_window.network
        if not network:
            # no network object at all (e.g. offline mode): report failure
            self.failed.emit()
            return
        try:
            update_info = asyncio.run_coroutine_threadsafe(self.get_update_info(), network.asyncio_loop).result()
        except Exception as e:
            #self.print_error(traceback.format_exc())
            self.print_error(f"got exception: '{repr(e)}'")
            self.failed.emit()
        else:
            self.checked.emit(update_info)
|
||||
@ -1,115 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2015 Thomas Voegtlin
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
from typing import Optional, List
|
||||
from enum import IntEnum
|
||||
|
||||
from PyQt5.QtCore import Qt
|
||||
from PyQt5.QtGui import QStandardItemModel, QStandardItem, QFont
|
||||
from PyQt5.QtWidgets import QAbstractItemView, QMenu
|
||||
|
||||
from electrum.i18n import _
|
||||
|
||||
from .util import MyTreeView, ColorScheme, MONOSPACE_FONT
|
||||
|
||||
class UTXOList(MyTreeView):
    """Tree view listing the wallet's unspent transaction outputs (coins)."""

    class Columns(IntEnum):
        ADDRESS = 0
        LABEL = 1
        AMOUNT = 2
        HEIGHT = 3
        OUTPOINT = 4

    headers = {
        Columns.ADDRESS: _('Address'),
        Columns.LABEL: _('Label'),
        Columns.AMOUNT: _('Amount'),
        Columns.HEIGHT: _('Height'),
        Columns.OUTPOINT: _('Output point'),
    }
    filter_columns = [Columns.ADDRESS, Columns.LABEL]

    def __init__(self, parent=None):
        super().__init__(parent, self.create_menu,
                         stretch_column=self.Columns.LABEL,
                         editable_columns=[])
        self.setModel(QStandardItemModel(self))
        self.setSelectionMode(QAbstractItemView.ExtendedSelection)
        self.setSortingEnabled(True)
        self.update()

    def update(self):
        """Rebuild the whole model from the wallet's current UTXO set."""
        self.wallet = self.parent.wallet
        coins = self.wallet.get_utxos()
        self.utxo_dict = {}
        self.model().clear()
        self.update_headers(self.__class__.headers)
        for row, coin in enumerate(coins):
            self.insert_utxo(row, coin)

    def insert_utxo(self, idx, x):
        """Insert a single UTXO *x* as a model row at position *idx*."""
        address = x.get('address')
        height = x.get('height')
        txid = x.get('prevout_hash')
        outpoint = txid + ":%d" % x.get('prevout_n')
        outpoint_short = txid[:10] + '...' + ":%d" % x.get('prevout_n')
        # remember the full outpoint so context-menu actions can find the coin
        self.utxo_dict[outpoint] = x
        label = self.wallet.get_label(txid)
        amount = self.parent.format_amount(x['value'], whitespaces=True)
        cells = [address, label, amount, '%d' % height, outpoint_short]
        row_items = [QStandardItem(cell) for cell in cells]
        self.set_editability(row_items)
        for col in (self.Columns.ADDRESS, self.Columns.AMOUNT, self.Columns.OUTPOINT):
            row_items[col].setFont(QFont(MONOSPACE_FONT))
        row_items[self.Columns.ADDRESS].setData(outpoint, Qt.UserRole)
        row_items[self.Columns.OUTPOINT].setToolTip(outpoint)
        if self.wallet.is_frozen(address):
            row_items[self.Columns.ADDRESS].setBackground(ColorScheme.BLUE.as_color(True))
        self.model().insertRow(idx, row_items)

    def selected_column_0_user_roles(self) -> Optional[List[str]]:
        """Return the outpoint strings attached to the selected rows, or None."""
        if not self.model():
            return None
        items = self.selected_in_column(self.Columns.ADDRESS)
        if not items:
            return None
        return [item.data(Qt.UserRole) for item in items]

    def create_menu(self, position):
        """Build and show the context menu for the current selection."""
        selected = self.selected_column_0_user_roles()
        if not selected:
            return
        menu = QMenu()
        coins = (self.utxo_dict[name] for name in selected)
        menu.addAction(_("Spend"), lambda: self.parent.spend_coins(coins))
        if len(selected) == 1:
            txid = selected[0].split(':')[0]
            tx = self.wallet.transactions.get(txid)
            if tx:
                # Prefer None if empty (None hides the Description: field in the window)
                label = self.wallet.get_label(txid) or None
                menu.addAction(_("Details"), lambda: self.parent.show_transaction(tx, label))

        menu.exec_(self.viewport().mapToGlobal(position))
|
||||
@ -1,640 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Electrum - lightweight Bitcoin client
|
||||
# Copyright (C) 2011 thomasv@gitorious
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
import os
|
||||
import re
|
||||
import ssl
|
||||
import sys
|
||||
import traceback
|
||||
import asyncio
|
||||
from typing import Tuple, Union, List, TYPE_CHECKING, Optional
|
||||
from collections import defaultdict
|
||||
|
||||
import aiorpcx
|
||||
from aiorpcx import RPCSession, Notification
|
||||
import certifi
|
||||
|
||||
from .util import PrintError, ignore_exceptions, log_exceptions, bfh, SilentTaskGroup
|
||||
from . import util
|
||||
from . import x509
|
||||
from . import pem
|
||||
from . import version
|
||||
from . import blockchain
|
||||
from .blockchain import Blockchain
|
||||
from . import constants
|
||||
from .i18n import _
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .network import Network
|
||||
|
||||
|
||||
ca_path = certifi.where()
|
||||
|
||||
|
||||
class NetworkTimeout:
    """Timeout presets for network requests, in seconds."""

    class Generic:
        """Defaults for ordinary requests."""
        NORMAL = 30
        RELAXED = 45
        MOST_RELAXED = 180

    class Urgent(Generic):
        """Tighter limits for requests on the critical path."""
        NORMAL = 10
        RELAXED = 20
        MOST_RELAXED = 60
|
||||
|
||||
class NotificationSession(RPCSession):
    """RPC session that, in addition to request/response, dispatches
    server-pushed notifications to subscriber queues and caches the last
    result per subscription key."""

    def __init__(self, *args, **kwargs):
        super(NotificationSession, self).__init__(*args, **kwargs)
        # subscription key -> list of asyncio.Queue that receive notifications
        self.subscriptions = defaultdict(list)
        # subscription key -> most recent result (served to late subscribers)
        self.cache = {}
        # bounds the number of concurrently outstanding requests
        self.in_flight_requests_semaphore = asyncio.Semaphore(100)
        self.default_timeout = NetworkTimeout.Generic.NORMAL
        self._msg_counter = 0  # local id used only for log correlation
        self.interface = None  # type: Optional[Interface]

    def _get_and_inc_msg_counter(self):
        # runs in event loop thread, no need for lock
        self._msg_counter += 1
        return self._msg_counter

    async def handle_request(self, request):
        """Called by aiorpcx for incoming messages; we only expect Notifications
        for keys we subscribed to."""
        self.maybe_log(f"--> {request}")
        # note: if server sends malformed request and we raise, the superclass
        # will catch the exception, count errors, and at some point disconnect
        if isinstance(request, Notification):
            # last positional arg is the payload; the rest identify the subscription
            params, result = request.args[:-1], request.args[-1]
            key = self.get_hashable_key_for_rpc_call(request.method, params)
            if key in self.subscriptions:
                self.cache[key] = result
                for queue in self.subscriptions[key]:
                    await queue.put(request.args)
            else:
                raise Exception('unexpected request: {}'.format(repr(request)))

    async def send_request(self, *args, timeout=None, **kwargs):
        """Send a request with a timeout; raises RequestTimedOut on expiry."""
        # note: the timeout starts after the request touches the wire!
        if timeout is None:
            timeout = self.default_timeout
        # note: the semaphore implementation guarantees no starvation
        async with self.in_flight_requests_semaphore:
            msg_id = self._get_and_inc_msg_counter()
            self.maybe_log(f"<-- {args} {kwargs} (id: {msg_id})")
            try:
                response = await asyncio.wait_for(
                    super().send_request(*args, **kwargs),
                    timeout)
            except asyncio.TimeoutError as e:
                raise RequestTimedOut(f'request timed out: {args} (id: {msg_id})') from e
            else:
                self.maybe_log(f"--> {response} (id: {msg_id})")
                return response

    async def subscribe(self, method: str, params: List, queue: asyncio.Queue):
        """Register *queue* for notifications of (method, params); the current
        value (cached or freshly requested) is delivered immediately."""
        # note: until the cache is written for the first time,
        # each 'subscribe' call might make a request on the network.
        key = self.get_hashable_key_for_rpc_call(method, params)
        self.subscriptions[key].append(queue)
        if key in self.cache:
            result = self.cache[key]
        else:
            result = await self.send_request(method, params)
            self.cache[key] = result
        await queue.put(params + [result])

    def unsubscribe(self, queue):
        """Unsubscribe a callback to free object references to enable GC."""
        # note: we can't unsubscribe from the server, so we keep receiving
        # subsequent notifications
        for v in self.subscriptions.values():
            if queue in v:
                v.remove(queue)

    @classmethod
    def get_hashable_key_for_rpc_call(cls, method, params):
        """Hashable index for subscriptions and cache"""
        return str(method) + repr(params)

    def maybe_log(self, msg: str) -> None:
        # log only when message-dumping was enabled on the interface or network
        if not self.interface: return
        if self.interface.debug or self.interface.network.debug:
            self.interface.print_error(msg)
|
||||
|
||||
|
||||
class GracefulDisconnect(Exception):
    """Raised to tear down a server connection cleanly (logged, not re-raised)."""
|
||||
|
||||
|
||||
class RequestTimedOut(GracefulDisconnect):
    """A network request exceeded its timeout; treated as a graceful disconnect."""

    def __str__(self):
        return _("Network request timed out.")
|
||||
|
||||
|
||||
class ErrorParsingSSLCert(Exception):
    """The locally saved SSL certificate could not be parsed."""
|
||||
class ErrorGettingSSLCertFromServer(Exception):
    """The server's SSL certificate could not be retrieved."""
|
||||
|
||||
|
||||
def deserialize_server(server_str: str) -> Tuple[str, str, str]:
    """Split a 'host:port:protocol' server string into its three components.

    The port is returned as a string (callers convert as needed); protocol is
    's' (SSL) or 't' (TCP plaintext).

    Raises ValueError if the string does not have three parts, the host is
    empty, the protocol is unknown, or the port is not an integer in 1..65535.
    """
    # host might be IPv6 address, hence do rsplit:
    host, port, protocol = str(server_str).rsplit(':', 2)
    if not host:
        raise ValueError('host must not be empty')
    if protocol not in ('s', 't'):
        raise ValueError('invalid network protocol: {}'.format(protocol))
    # convert once (was converted twice before); throws ValueError if not an int
    net_port = int(port)
    if not (0 < net_port < 2**16):
        raise ValueError('port {} is out of valid range'.format(port))
    return host, port, protocol
|
||||
|
||||
|
||||
def serialize_server(host: str, port: Union[str, int], protocol: str) -> str:
    """Inverse of deserialize_server: join the parts into 'host:port:protocol'."""
    return '{}:{}:{}'.format(host, port, protocol)
|
||||
|
||||
|
||||
class Interface(PrintError):
    """A single connection to one Electrum server.

    Owns the (optionally SSL) session, tracks the server's chain tip, and
    syncs headers into a local Blockchain. Construction immediately schedules
    the run() coroutine on the network's main task group.
    """
    verbosity_filter = 'i'

    def __init__(self, network: 'Network', server: str, proxy: Optional[dict]):
        # resolved once the connection is up and a blockchain was chosen
        self.ready = asyncio.Future()
        # resolved when the connection is torn down (see handle_disconnect)
        self.got_disconnected = asyncio.Future()
        self.server = server
        self.host, self.port, self.protocol = deserialize_server(self.server)
        self.port = int(self.port)
        assert network.config.path
        # per-server cert file: empty file = CA-signed, else a pinned self-signed cert
        self.cert_path = os.path.join(network.config.path, 'certs', self.host)
        self.blockchain = None
        self._requested_chunks = set()  # chunk indices currently being fetched
        self.network = network
        self._set_proxy(proxy)
        self.session = None  # type: NotificationSession

        self.tip_header = None
        self.tip = 0

        # Dump network messages (only for this interface). Set at runtime from the console.
        self.debug = False

        asyncio.run_coroutine_threadsafe(
            self.network.main_taskgroup.spawn(self.run()), self.network.asyncio_loop)
        self.group = SilentTaskGroup()

    def diagnostic_name(self):
        """Name used as the PrintError log prefix."""
        return self.host

    def _set_proxy(self, proxy: dict):
        """Translate the proxy config dict into an aiorpcx SOCKSProxy (or None)."""
        if proxy:
            username, pw = proxy.get('user'), proxy.get('password')
            if not username or not pw:
                auth = None
            else:
                auth = aiorpcx.socks.SOCKSUserAuth(username, pw)
            if proxy['mode'] == "socks4":
                self.proxy = aiorpcx.socks.SOCKSProxy((proxy['host'], int(proxy['port'])), aiorpcx.socks.SOCKS4a, auth)
            elif proxy['mode'] == "socks5":
                self.proxy = aiorpcx.socks.SOCKSProxy((proxy['host'], int(proxy['port'])), aiorpcx.socks.SOCKS5, auth)
            else:
                raise NotImplementedError  # http proxy not available with aiorpcx
        else:
            self.proxy = None

    async def is_server_ca_signed(self, sslc):
        """Probe the server with CA verification on; False if verification fails."""
        try:
            await self.open_session(sslc, exit_early=True)
        except ssl.SSLError as e:
            assert e.reason == 'CERTIFICATE_VERIFY_FAILED'
            return False
        return True

    async def _try_saving_ssl_cert_for_first_time(self, ca_ssl_context):
        """Create the per-server cert file: empty if CA-signed, else pin the cert."""
        try:
            ca_signed = await self.is_server_ca_signed(ca_ssl_context)
        except (OSError, aiorpcx.socks.SOCKSError) as e:
            raise ErrorGettingSSLCertFromServer(e) from e
        if ca_signed:
            with open(self.cert_path, 'w') as f:
                # empty file means this is CA signed, not self-signed
                f.write('')
        else:
            await self.save_certificate()

    def _is_saved_ssl_cert_available(self):
        """Return True if a usable saved cert file exists (expired pinned certs
        are deleted and reported as unavailable)."""
        if not os.path.exists(self.cert_path):
            return False
        with open(self.cert_path, 'r') as f:
            contents = f.read()
        if contents == '':  # CA signed
            return True
        # pinned self-signed cert
        try:
            b = pem.dePem(contents, 'CERTIFICATE')
        except SyntaxError as e:
            self.print_error("error parsing already saved cert:", e)
            raise ErrorParsingSSLCert(e) from e
        try:
            x = x509.X509(b)
        except Exception as e:
            self.print_error("error parsing already saved cert:", e)
            raise ErrorParsingSSLCert(e) from e
        try:
            x.check_date()
            return True
        except x509.CertificateError as e:
            self.print_error("certificate has expired:", e)
            os.unlink(self.cert_path)  # delete pinned cert only in this case
            return False

    async def _get_ssl_context(self):
        """Return the SSLContext to connect with, or None for plaintext ('t')."""
        if self.protocol != 's':
            # using plaintext TCP
            return None

        # see if we already have cert for this server; or get it for the first time
        ca_sslc = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=ca_path)
        if not self._is_saved_ssl_cert_available():
            await self._try_saving_ssl_cert_for_first_time(ca_sslc)
        # now we have a file saved in our certificate store
        siz = os.stat(self.cert_path).st_size
        if siz == 0:
            # CA signed cert
            sslc = ca_sslc
        else:
            # pinned self-signed cert
            sslc = ssl.create_default_context(ssl.Purpose.SERVER_AUTH, cafile=self.cert_path)
            # a self-signed cert won't carry a matching hostname; trust the pin instead
            sslc.check_hostname = 0
        return sslc

    def handle_disconnect(func):
        """Decorator: log GracefulDisconnect and always mark the interface down."""
        async def wrapper_func(self: 'Interface', *args, **kwargs):
            try:
                return await func(self, *args, **kwargs)
            except GracefulDisconnect as e:
                self.print_error("disconnecting gracefully. {}".format(repr(e)))
            finally:
                # runs on every exit path, including normal return
                await self.network.connection_down(self)
                self.got_disconnected.set_result(1)
        return wrapper_func

    @ignore_exceptions  # do not kill main_taskgroup
    @log_exceptions
    @handle_disconnect
    async def run(self):
        """Main coroutine: establish SSL context, then keep a session open."""
        try:
            ssl_context = await self._get_ssl_context()
        except (ErrorParsingSSLCert, ErrorGettingSSLCertFromServer) as e:
            self.print_error('disconnecting due to: {}'.format(repr(e)))
            return
        try:
            await self.open_session(ssl_context)
        except (asyncio.CancelledError, OSError, aiorpcx.socks.SOCKSError) as e:
            self.print_error('disconnecting due to: {}'.format(repr(e)))
            return

    def mark_ready(self):
        """Resolve the *ready* future once we know the server's tip and have
        picked a local blockchain to extend. Idempotent."""
        if self.ready.cancelled():
            raise GracefulDisconnect('conn establishment was too slow; *ready* future was cancelled')
        if self.ready.done():
            return

        assert self.tip_header
        chain = blockchain.check_header(self.tip_header)
        if not chain:
            self.blockchain = blockchain.get_best_chain()
        else:
            self.blockchain = chain
        assert self.blockchain is not None

        self.print_error("set blockchain with height", self.blockchain.height())

        self.ready.set_result(1)

    async def save_certificate(self):
        """Fetch the server's certificate and pin it to cert_path."""
        if not os.path.exists(self.cert_path):
            # we may need to retry this a few times, in case the handshake hasn't completed
            for _ in range(10):
                dercert = await self.get_certificate()
                if dercert:
                    self.print_error("succeeded in getting cert")
                    with open(self.cert_path, 'w') as f:
                        cert = ssl.DER_cert_to_PEM_cert(dercert)
                        # workaround android bug
                        cert = re.sub("([^\n])-----END CERTIFICATE-----","\\1\n-----END CERTIFICATE-----",cert)
                        f.write(cert)
                        # even though close flushes we can't fsync when closed.
                        # and we must flush before fsyncing, cause flush flushes to OS buffer
                        # fsync writes to OS buffer to disk
                        f.flush()
                        os.fsync(f.fileno())
                    break
                await asyncio.sleep(1)
            else:
                raise Exception("could not get certificate")

    async def get_certificate(self):
        """Open a throwaway unverified connection and return the peer's DER cert,
        or None if it could not be read."""
        sslc = ssl.SSLContext()
        try:
            async with aiorpcx.Connector(RPCSession,
                                         host=self.host, port=self.port,
                                         ssl=sslc, proxy=self.proxy) as session:
                # NOTE(review): reaches into asyncio/ssl internals; may break
                # across Python versions
                return session.transport._ssl_protocol._sslpipe._sslobj.getpeercert(True)
        except ValueError:
            return None

    async def get_block_header(self, height, assert_mode):
        """Request and deserialize a single block header at *height*."""
        self.print_error('requesting block header {} in mode {}'.format(height, assert_mode))
        # use lower timeout as we usually have network.bhi_lock here
        timeout = self.network.get_network_timeout_seconds(NetworkTimeout.Urgent)
        res = await self.session.send_request('blockchain.block.header', [height], timeout=timeout)
        return blockchain.deserialize_header(bytes.fromhex(res), height)

    async def request_chunk(self, height, tip=None, *, can_return_early=False):
        """Fetch the 2016-header chunk containing *height* and connect it.

        Returns (connect_result, num_headers); num_headers is 0 on failure.
        """
        index = height // 2016
        if can_return_early and index in self._requested_chunks:
            return
        self.print_error("requesting chunk from height {}".format(height))
        size = 2016
        if tip is not None:
            size = min(size, tip - index * 2016 + 1)
            size = max(size, 0)
        try:
            self._requested_chunks.add(index)
            res = await self.session.send_request('blockchain.block.headers', [index * 2016, size])
        finally:
            # always clear the in-flight marker, even on request failure
            try: self._requested_chunks.remove(index)
            except KeyError: pass
        conn = self.blockchain.connect_chunk(index, res['hex'])
        if not conn:
            return conn, 0
        return conn, res['count']

    async def open_session(self, sslc, exit_early=False):
        """Connect, negotiate the protocol version, then (unless *exit_early*)
        run the ping / header-fetch / connection-monitor tasks until one fails."""
        async with aiorpcx.Connector(NotificationSession,
                                     host=self.host, port=self.port,
                                     ssl=sslc, proxy=self.proxy) as session:
            self.session = session  # type: NotificationSession
            self.session.interface = self
            self.session.default_timeout = self.network.get_network_timeout_seconds(NetworkTimeout.Generic)
            try:
                ver = await session.send_request('server.version', [version.ELECTRUM_VERSION, version.PROTOCOL_VERSION])
            except aiorpcx.jsonrpc.RPCError as e:
                raise GracefulDisconnect(e)  # probably 'unsupported protocol version'
            if exit_early:
                return
            self.print_error("connection established. version: {}".format(ver))

            async with self.group as group:
                await group.spawn(self.ping)
                await group.spawn(self.run_fetch_blocks)
                await group.spawn(self.monitor_connection)
            # NOTE: group.__aexit__ will be called here; this is needed to notice exceptions in the group!

    async def monitor_connection(self):
        """Poll the session and raise GracefulDisconnect once it closes."""
        while True:
            await asyncio.sleep(1)
            if not self.session or self.session.is_closing():
                raise GracefulDisconnect('server closed session')

    async def ping(self):
        """Keep-alive: ping the server every 5 minutes."""
        while True:
            await asyncio.sleep(300)
            await self.session.send_request('server.ping')

    async def close(self):
        """Close the underlying session; task teardown follows via monitor_connection."""
        if self.session:
            await self.session.close()
        # monitor_connection will cancel tasks

    async def run_fetch_blocks(self):
        """Subscribe to new headers and process each announced tip forever."""
        header_queue = asyncio.Queue()
        await self.session.subscribe('blockchain.headers.subscribe', [], header_queue)
        while True:
            item = await header_queue.get()
            raw_header = item[0]
            height = raw_header['height']
            header = blockchain.deserialize_header(bfh(raw_header['hex']), height)
            self.tip_header = header
            self.tip = height
            if self.tip < constants.net.max_checkpoint():
                raise GracefulDisconnect('server tip below max checkpoint')
            self.mark_ready()
            await self._process_header_at_tip()
            self.network.trigger_callback('network_updated')
            await self.network.switch_unwanted_fork_interface()
            await self.network.switch_lagging_interface()

    async def _process_header_at_tip(self):
        """Try to connect the announced tip header, syncing backlog if needed."""
        height, header = self.tip, self.tip_header
        async with self.network.bhi_lock:
            if self.blockchain.height() >= height and self.blockchain.check_header(header):
                # another interface amended the blockchain
                self.print_error("skipping header", height)
                return
            _, height = await self.step(height, header)
            # in the simple case, height == self.tip+1
            if height <= self.tip:
                await self.sync_until(height)
        self.network.trigger_callback('blockchain_updated')

    async def sync_until(self, height, next_height=None):
        """Download headers from *height* up to *next_height* (default: tip),
        preferring whole chunks when far behind."""
        if next_height is None:
            next_height = self.tip
        last = None
        while last is None or height <= next_height:
            prev_last, prev_height = last, height
            if next_height > height + 10:
                could_connect, num_headers = await self.request_chunk(height, next_height)
                if not could_connect:
                    if height <= constants.net.max_checkpoint():
                        raise GracefulDisconnect('server chain conflicts with checkpoints or genesis')
                    last, height = await self.step(height)
                    continue
                self.network.trigger_callback('network_updated')
                height = (height // 2016 * 2016) + num_headers
                assert height <= next_height+1, (height, self.tip)
                last = 'catchup'
            else:
                last, height = await self.step(height)
            assert (prev_last, prev_height) != (last, height), 'had to prevent infinite loop in interface.sync_until'
        return last, height

    async def step(self, height, header=None):
        """Advance header sync by one decision at *height*.

        Returns (mode, next_height) where mode is 'catchup', 'no_fork' or 'fork'.
        Headers carrying a 'mock' key route chain checks through test hooks.
        """
        assert 0 <= height <= self.tip, (height, self.tip)
        if header is None:
            header = await self.get_block_header(height, 'catchup')

        chain = blockchain.check_header(header) if 'mock' not in header else header['mock']['check'](header)
        if chain:
            self.blockchain = chain if isinstance(chain, Blockchain) else self.blockchain
            # note: there is an edge case here that is not handled.
            # we might know the blockhash (enough for check_header) but
            # not have the header itself. e.g. regtest chain with only genesis.
            # this situation resolves itself on the next block
            return 'catchup', height+1

        can_connect = blockchain.can_connect(header) if 'mock' not in header else header['mock']['connect'](height)
        if not can_connect:
            self.print_error("can't connect", height)
            height, header, bad, bad_header = await self._search_headers_backwards(height, header)
            chain = blockchain.check_header(header) if 'mock' not in header else header['mock']['check'](header)
            can_connect = blockchain.can_connect(header) if 'mock' not in header else header['mock']['connect'](height)
            assert chain or can_connect
        if can_connect:
            self.print_error("could connect", height)
            height += 1
            if isinstance(can_connect, Blockchain):  # not when mocking
                self.blockchain = can_connect
                self.blockchain.save_header(header)
            return 'catchup', height

        good, bad, bad_header = await self._search_headers_binary(height, bad, bad_header, chain)
        return await self._resolve_potential_chain_fork_given_forkpoint(good, bad, bad_header)

    async def _search_headers_binary(self, height, bad, bad_header, chain):
        """Binary-search between a known-good and known-bad height for the
        exact fork point. Returns (good, bad, bad_header) with good+1 == bad."""
        assert bad == bad_header['block_height']
        _assert_header_does_not_check_against_any_chain(bad_header)

        self.blockchain = chain if isinstance(chain, Blockchain) else self.blockchain
        good = height
        while True:
            assert good < bad, (good, bad)
            height = (good + bad) // 2
            self.print_error("binary step. good {}, bad {}, height {}".format(good, bad, height))
            header = await self.get_block_header(height, 'binary')
            chain = blockchain.check_header(header) if 'mock' not in header else header['mock']['check'](header)
            if chain:
                self.blockchain = chain if isinstance(chain, Blockchain) else self.blockchain
                good = height
            else:
                bad = height
                bad_header = header
            if good + 1 == bad:
                break

        mock = 'mock' in bad_header and bad_header['mock']['connect'](height)
        real = not mock and self.blockchain.can_connect(bad_header, check_height=False)
        if not real and not mock:
            raise Exception('unexpected bad header during binary: {}'.format(bad_header))
        _assert_header_does_not_check_against_any_chain(bad_header)

        self.print_error("binary search exited. good {}, bad {}".format(good, bad))
        return good, bad, bad_header

    async def _resolve_potential_chain_fork_given_forkpoint(self, good, bad, bad_header):
        """Given the fork point, either continue catching up on the current
        chain ('no_fork') or create a new Blockchain fork ('fork')."""
        assert good + 1 == bad
        assert bad == bad_header['block_height']
        _assert_header_does_not_check_against_any_chain(bad_header)
        # 'good' is the height of a block 'good_header', somewhere in self.blockchain.
        # bad_header connects to good_header; bad_header itself is NOT in self.blockchain.

        bh = self.blockchain.height()
        assert bh >= good, (bh, good)
        if bh == good:
            height = good + 1
            self.print_error("catching up from {}".format(height))
            return 'no_fork', height

        # this is a new fork we don't yet have
        height = bad + 1
        self.print_error(f"new fork at bad height {bad}")
        forkfun = self.blockchain.fork if 'mock' not in bad_header else bad_header['mock']['fork']
        b = forkfun(bad_header)  # type: Blockchain
        self.blockchain = b
        assert b.forkpoint == bad
        return 'fork', height

    async def _search_headers_backwards(self, height, header):
        """Walk backwards (doubling the stride) until a header that checks or
        connects is found; stops hard at the checkpoint boundary."""
        async def iterate():
            # returns True to keep searching backwards, False when anchored
            nonlocal height, header
            checkp = False
            if height <= constants.net.max_checkpoint():
                height = constants.net.max_checkpoint()
                checkp = True
            header = await self.get_block_header(height, 'backward')
            chain = blockchain.check_header(header) if 'mock' not in header else header['mock']['check'](header)
            can_connect = blockchain.can_connect(header) if 'mock' not in header else header['mock']['connect'](height)
            if chain or can_connect:
                return False
            if checkp:
                raise GracefulDisconnect("server chain conflicts with checkpoints")
            return True

        bad, bad_header = height, header
        _assert_header_does_not_check_against_any_chain(bad_header)
        with blockchain.blockchains_lock: chains = list(blockchain.blockchains.values())
        local_max = max([0] + [x.height() for x in chains]) if 'mock' not in header else float('inf')
        height = min(local_max + 1, height - 1)
        while await iterate():
            bad, bad_header = height, header
            delta = self.tip - height
            height = self.tip - 2 * delta

        _assert_header_does_not_check_against_any_chain(bad_header)
        self.print_error("exiting backward mode at", height)
        return height, header, bad, bad_header
|
||||
|
||||
|
||||
def _assert_header_does_not_check_against_any_chain(header: dict) -> None:
|
||||
chain_bad = blockchain.check_header(header) if 'mock' not in header else header['mock']['check'](header)
|
||||
if chain_bad:
|
||||
raise Exception('bad_header must not check!')
|
||||
|
||||
|
||||
def check_cert(host, cert):
    """Parse a PEM certificate and print whether it has expired (debug helper).

    On a parse failure the traceback is printed to stdout and the function
    returns without output.
    """
    try:
        b = pem.dePem(cert, 'CERTIFICATE')
        x = x509.X509(b)
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
        traceback.print_exc(file=sys.stdout)
        return

    try:
        x.check_date()
        expired = False
    except Exception:
        # check_date raises when the certificate is outside its validity window
        expired = True

    m = "host: %s\n"%host
    m += "has_expired: %s\n"% expired
    util.print_msg(m)
|
||||
|
||||
|
||||
# Used by tests
|
||||
def _match_hostname(name, val):
|
||||
if val == name:
|
||||
return True
|
||||
|
||||
return val.startswith('*.') and name.endswith(val[1:])
|
||||
|
||||
|
||||
def test_certificates():
    """Run check_cert over every certificate saved under the config's certs dir."""
    from .simple_config import SimpleConfig
    config = SimpleConfig()
    certs_dir = os.path.join(config.path, "certs")
    for filename in os.listdir(certs_dir):
        with open(os.path.join(certs_dir, filename), encoding='utf-8') as f:
            contents = f.read()
        check_cert(filename, contents)
|
||||
|
||||
# Manual smoke test: run this module as a script to validate all saved certificates.
if __name__ == "__main__":
    test_certificates()
|
||||