diff --git a/app/routes.py b/app/routes.py
index cae42c8..ec9bf9d 100644
--- a/app/routes.py
+++ b/app/routes.py
@@ -5,7 +5,7 @@
 import sqlite3
 import json
 import os
-dbfolder = '/home/vivek/Desktop/flo-token-tracking-backup/flo-token-tracking'
+dbfolder = '/home/production/deployed/flo-token-tracking'
 
 
 @app.route('/')
diff --git a/ftt-ssetest b/ftt-ssetest
new file mode 160000
index 0000000..fb9492e
--- /dev/null
+++ b/ftt-ssetest
@@ -0,0 +1 @@
+Subproject commit fb9492e138bae9627252e5b8156532def30c6be7
diff --git a/gitupload.sh b/gitupload.sh
new file mode 100644
index 0000000..11fa7cf
--- /dev/null
+++ b/gitupload.sh
@@ -0,0 +1,7 @@
+echo "# tokenexplorer-historic" >> README.md
+git init
+git add README.md
+git commit -m "first commit"
+git branch -M main
+git remote add origin https://github.com/ranchimall/tokenexplorer-historic.git
+git push -u origin main
diff --git a/py3/bin/activate b/py3/bin/activate
new file mode 100644
index 0000000..256b85d
--- /dev/null
+++ b/py3/bin/activate
@@ -0,0 +1,76 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+    # reset old environment variables
+    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
+        PATH="${_OLD_VIRTUAL_PATH:-}"
+        export PATH
+        unset _OLD_VIRTUAL_PATH
+    fi
+    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
+        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
+        export PYTHONHOME
+        unset _OLD_VIRTUAL_PYTHONHOME
+    fi
+
+    # This should detect bash and zsh, which have a hash command that must
+    # be called to get it to forget past commands. Without forgetting
+    # past commands the $PATH changes we made may not be respected
+    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+        hash -r
+    fi
+
+    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
+        PS1="${_OLD_VIRTUAL_PS1:-}"
+        export PS1
+        unset _OLD_VIRTUAL_PS1
+    fi
+
+    unset VIRTUAL_ENV
+    if [ ! "$1" = "nondestructive" ] ; then
+        # Self destruct!
+        unset -f deactivate
+    fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV="/home/production/deployed/flo-token-explorer/py3"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/bin:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+# could use `if (set -u; : $PYTHONHOME) ;` in bash
+if [ -n "${PYTHONHOME:-}" ] ; then
+    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
+    unset PYTHONHOME
+fi
+
+if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
+    _OLD_VIRTUAL_PS1="${PS1:-}"
+    if [ "x(py3) " != x ] ; then
+        PS1="(py3) ${PS1:-}"
+    else
+        if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
+            # special case for Aspen magic directories
+            # see http://www.zetadev.com/software/aspen/
+            PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
+        else
+            PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
+        fi
+    fi
+    export PS1
+fi
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands. Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+    hash -r
+fi
diff --git a/py3/bin/activate.csh b/py3/bin/activate.csh
new file mode 100644
index 0000000..abdbe1d
--- /dev/null
+++ b/py3/bin/activate.csh
@@ -0,0 +1,37 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi .
+# Ported to Python 3.3 venv by Andrew Svetlov
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "/home/production/deployed/flo-token-explorer/py3"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/bin:$PATH"
+
+
+set _OLD_VIRTUAL_PROMPT="$prompt"
+
+if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
+    if ("py3" != "") then
+        set env_name = "py3"
+    else
+        if (`basename "VIRTUAL_ENV"` == "__") then
+            # special case for Aspen magic directories
+            # see http://www.zetadev.com/software/aspen/
+            set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
+        else
+            set env_name = `basename "$VIRTUAL_ENV"`
+        endif
+    endif
+    set prompt = "[$env_name] $prompt"
+    unset env_name
+endif
+
+alias pydoc python -m pydoc
+
+rehash
diff --git a/py3/bin/activate.fish b/py3/bin/activate.fish
new file mode 100644
index 0000000..ad5aea0
--- /dev/null
+++ b/py3/bin/activate.fish
@@ -0,0 +1,75 @@
+# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
+# you cannot run it directly
+
+function deactivate -d "Exit virtualenv and return to normal shell environment"
+    # reset old environment variables
+    if test -n "$_OLD_VIRTUAL_PATH"
+        set -gx PATH $_OLD_VIRTUAL_PATH
+        set -e _OLD_VIRTUAL_PATH
+    end
+    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+        set -e _OLD_VIRTUAL_PYTHONHOME
+    end
+
+    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+        functions -e fish_prompt
+        set -e _OLD_FISH_PROMPT_OVERRIDE
+        functions -c _old_fish_prompt fish_prompt
+        functions -e _old_fish_prompt
+    end
+
+    set -e VIRTUAL_ENV
+    if test "$argv[1]" != "nondestructive"
+        # Self destruct!
+        functions -e deactivate
+    end
+end
+
+# unset irrelevant variables
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "/home/production/deployed/flo-token-explorer/py3"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/bin" $PATH
+
+# unset PYTHONHOME if set
+if set -q PYTHONHOME
+    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+    set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+    # fish uses a function instead of an env var to generate the prompt.
+
+    # save the current fish_prompt function as the function _old_fish_prompt
+    functions -c fish_prompt _old_fish_prompt
+
+    # with the original prompt function renamed, we can override with our own.
+    function fish_prompt
+        # Save the return status of the last command
+        set -l old_status $status
+
+        # Prompt override?
+        if test -n "(py3) "
+            printf "%s%s" "(py3) " (set_color normal)
+        else
+            # ...Otherwise, prepend env
+            set -l _checkbase (basename "$VIRTUAL_ENV")
+            if test $_checkbase = "__"
+                # special case for Aspen magic directories
+                # see http://www.zetadev.com/software/aspen/
+                printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
+            else
+                printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
+            end
+        end
+
+        # Restore the return status of the previous command.
+        echo "exit $old_status" | .
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/py3/bin/alembic b/py3/bin/alembic new file mode 100755 index 0000000..7de2a77 --- /dev/null +++ b/py3/bin/alembic @@ -0,0 +1,12 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 +# EASY-INSTALL-ENTRY-SCRIPT: 'alembic==1.0.11','console_scripts','alembic' +__requires__ = 'alembic==1.0.11' +import re +import sys +from pkg_resources import load_entry_point + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit( + load_entry_point('alembic==1.0.11', 'console_scripts', 'alembic')() + ) diff --git a/py3/bin/chardetect b/py3/bin/chardetect new file mode 100755 index 0000000..a9011ea --- /dev/null +++ b/py3/bin/chardetect @@ -0,0 +1,11 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from chardet.cli.chardetect import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/py3/bin/easy_install b/py3/bin/easy_install new file mode 100755 index 0000000..7fc1889 --- /dev/null +++ b/py3/bin/easy_install @@ -0,0 +1,11 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from setuptools.command.easy_install import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/py3/bin/easy_install-3.6 b/py3/bin/easy_install-3.6 new file mode 100755 index 0000000..7fc1889 --- /dev/null +++ b/py3/bin/easy_install-3.6 @@ -0,0 +1,11 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from setuptools.command.easy_install import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/py3/bin/flask b/py3/bin/flask new file mode 100755 index 0000000..1e00f9d --- /dev/null +++ b/py3/bin/flask @@ -0,0 +1,11 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from flask.cli import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/py3/bin/gunicorn b/py3/bin/gunicorn new file mode 100755 index 0000000..8e7efd2 --- /dev/null +++ b/py3/bin/gunicorn @@ -0,0 +1,11 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from gunicorn.app.wsgiapp import run + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(run()) diff --git a/py3/bin/gunicorn_paster b/py3/bin/gunicorn_paster new file mode 100755 index 0000000..68beb47 --- /dev/null +++ b/py3/bin/gunicorn_paster @@ -0,0 +1,11 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from gunicorn.app.pasterapp import run + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(run()) diff --git a/py3/bin/hypercorn b/py3/bin/hypercorn new file mode 100755 index 0000000..b2e6dc6 --- /dev/null +++ b/py3/bin/hypercorn @@ -0,0 +1,11 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from hypercorn.__main__ import main + +if __name__ == 
'__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/py3/bin/mako-render b/py3/bin/mako-render new file mode 100755 index 0000000..4bb1ccf --- /dev/null +++ b/py3/bin/mako-render @@ -0,0 +1,12 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 +# EASY-INSTALL-ENTRY-SCRIPT: 'Mako==1.1.0','console_scripts','mako-render' +__requires__ = 'Mako==1.1.0' +import re +import sys +from pkg_resources import load_entry_point + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit( + load_entry_point('Mako==1.1.0', 'console_scripts', 'mako-render')() + ) diff --git a/py3/bin/pip b/py3/bin/pip new file mode 100755 index 0000000..bfb8ffd --- /dev/null +++ b/py3/bin/pip @@ -0,0 +1,11 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from pip import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/py3/bin/pip3 b/py3/bin/pip3 new file mode 100755 index 0000000..bfb8ffd --- /dev/null +++ b/py3/bin/pip3 @@ -0,0 +1,11 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from pip import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/py3/bin/pip3.6 b/py3/bin/pip3.6 new file mode 100755 index 0000000..bfb8ffd --- /dev/null +++ b/py3/bin/pip3.6 @@ -0,0 +1,11 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from pip import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/py3/bin/python b/py3/bin/python new file mode 120000 index 0000000..b8a0adb --- /dev/null +++ b/py3/bin/python @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/py3/bin/python3 b/py3/bin/python3 new file mode 120000 index 0000000..ae65fda --- /dev/null +++ b/py3/bin/python3 @@ -0,0 +1 @@ +/usr/bin/python3 \ No newline at end of file diff --git a/py3/bin/quart b/py3/bin/quart new file mode 100755 index 0000000..4abb1aa --- /dev/null +++ b/py3/bin/quart @@ -0,0 +1,11 @@ +#!/home/production/deployed/flo-token-explorer/py3/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from quart.cli import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/py3/lib/python3.6/site-packages/Click-7.0.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/Click-7.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/Click-7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/Click-7.0.dist-info/LICENSE.txt b/py3/lib/python3.6/site-packages/Click-7.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..87ce152 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Click-7.0.dist-info/LICENSE.txt @@ -0,0 +1,39 @@ +Copyright © 2014 by the Pallets team. + +Some rights reserved. 
+ +Redistribution and use in source and binary forms of the software as +well as documentation, with or without modification, are permitted +provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +- Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + +---- + +Click uses parts of optparse written by Gregory P. Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright © 2001-2006 Gregory P. Ward. All rights reserved. +Copyright © 2002-2006 Python Software Foundation. All rights reserved. diff --git a/py3/lib/python3.6/site-packages/Click-7.0.dist-info/METADATA b/py3/lib/python3.6/site-packages/Click-7.0.dist-info/METADATA new file mode 100644 index 0000000..625bdad --- /dev/null +++ b/py3/lib/python3.6/site-packages/Click-7.0.dist-info/METADATA @@ -0,0 +1,121 @@ +Metadata-Version: 2.1 +Name: Click +Version: 7.0 +Summary: Composable command line interface toolkit +Home-page: https://palletsprojects.com/p/click/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets Team +Maintainer-email: contact@palletsprojects.com +License: BSD +Project-URL: Documentation, https://click.palletsprojects.com/ +Project-URL: Code, https://github.com/pallets/click +Project-URL: Issue tracker, https://github.com/pallets/click/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* + +\$ click\_ +========== + +Click is a Python package for creating beautiful command line interfaces +in a composable way with as little code as necessary. It's the "Command +Line Interface Creation Kit". 
It's highly configurable but comes with +sensible defaults out of the box. + +It aims to make the process of writing command line tools quick and fun +while also preventing any frustration caused by the inability to +implement an intended CLI API. + +Click in three points: + +- Arbitrary nesting of commands +- Automatic help page generation +- Supports lazy loading of subcommands at runtime + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install click + +Click supports Python 3.4 and newer, Python 2.7, and PyPy. + +.. _pip: https://pip.pypa.io/en/stable/quickstart/ + + +A Simple Example +---------------- + +What does it look like? Here is an example of a simple Click program: + +.. code-block:: python + + import click + + @click.command() + @click.option("--count", default=1, help="Number of greetings.") + @click.option("--name", prompt="Your name", + help="The person to greet.") + def hello(count, name): + """Simple program that greets NAME for a total of COUNT times.""" + for _ in range(count): + click.echo("Hello, %s!" % name) + + if __name__ == '__main__': + hello() + +And what it looks like when run: + +.. code-block:: text + + $ python hello.py --count=3 + Your name: Click + Hello, Click! + Hello, Click! + Hello, Click! + + +Donate +------ + +The Pallets organization develops and supports Click and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +* Website: https://palletsprojects.com/p/click/ +* Documentation: https://click.palletsprojects.com/ +* License: `BSD `_ +* Releases: https://pypi.org/project/click/ +* Code: https://github.com/pallets/click +* Issue tracker: https://github.com/pallets/click/issues +* Test status: + + * Linux, Mac: https://travis-ci.org/pallets/click + * Windows: https://ci.appveyor.com/project/pallets/click + +* Test coverage: https://codecov.io/gh/pallets/click + + diff --git a/py3/lib/python3.6/site-packages/Click-7.0.dist-info/RECORD b/py3/lib/python3.6/site-packages/Click-7.0.dist-info/RECORD new file mode 100644 index 0000000..e3c80c9 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Click-7.0.dist-info/RECORD @@ -0,0 +1,40 @@ +Click-7.0.dist-info/LICENSE.txt,sha256=4hIxn676T0Wcisk3_chVcECjyrivKTZsoqSNI5AlIlw,1876 +Click-7.0.dist-info/METADATA,sha256=-r8jeke3Zer4diRvT1MjFZuiJ6yTT_qFP39svLqdaLI,3516 +Click-7.0.dist-info/RECORD,, +Click-7.0.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 +Click-7.0.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 +click/__init__.py,sha256=HjGThQ7tef9kkwCV371TBnrf0SAi6fKfU_jtEnbYTvQ,2789 +click/_bashcomplete.py,sha256=iaNUmtxag0YPfxba3TDYCNietiTMQIrvhRLj-H8okFU,11014 +click/_compat.py,sha256=vYmvoj4opPxo-c-2GMQQjYT_r_QkOKybkfGoeVrt0dA,23399 +click/_termui_impl.py,sha256=xHmLtOJhKUCVD6168yucJ9fknUJPAMs0eUTPgVUO-GQ,19611 +click/_textwrap.py,sha256=gwS4m7bdQiJnzaDG8osFcRb-5vn4t4l2qSCy-5csCEc,1198 +click/_unicodefun.py,sha256=QHy2_5jYlX-36O-JVrTHNnHOqg8tquUR0HmQFev7Ics,4364 +click/_winconsole.py,sha256=PPWVak8Iikm_gAPsxMrzwsVFCvHgaW3jPaDWZ1JBl3U,8965 +click/core.py,sha256=q8FLcDZsagBGSRe5Y9Hi_FGvAeZvusNfoO5EkhkSQ8Y,75305 +click/decorators.py,sha256=idKt6duLUUfAFftrHoREi8MJSd39XW36pUVHthdglwk,11226 +click/exceptions.py,sha256=CNpAjBAE7qjaV4WChxQeak95e5yUOau8AsvT-8m6wss,7663 
+click/formatting.py,sha256=eh-cypTUAhpI3HD-K4ZpR3vCiURIO62xXvKkR3tNUTM,8889 +click/globals.py,sha256=oQkou3ZQ5DgrbVM6BwIBirwiqozbjfirzsLGAlLRRdg,1514 +click/parser.py,sha256=m-nGZz4VwprM42_qtFlWFGo7yRJQxkBlRcZodoH593Y,15510 +click/termui.py,sha256=o_ZXB2jyvL2Rce7P_bFGq452iyBq9ykJyRApIPMCZO0,23207 +click/testing.py,sha256=aYGqY_iWLu2p4k7lkuJ6t3fqpf6aPGqTsyLzNY_ngKg,13062 +click/types.py,sha256=2Q929p-aBP_ZYuMFJqJR-Ipucofv3fmDc5JzBDPmzJU,23287 +click/utils.py,sha256=6-D0WkAxvv9FkgHXSHwDIv0l9Gdx9Mm6Z5vuKNLIfZI,15763 +Click-7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click/__pycache__/_textwrap.cpython-36.pyc,, +click/__pycache__/testing.cpython-36.pyc,, +click/__pycache__/_unicodefun.cpython-36.pyc,, +click/__pycache__/_bashcomplete.cpython-36.pyc,, +click/__pycache__/_compat.cpython-36.pyc,, +click/__pycache__/globals.cpython-36.pyc,, +click/__pycache__/_winconsole.cpython-36.pyc,, +click/__pycache__/parser.cpython-36.pyc,, +click/__pycache__/_termui_impl.cpython-36.pyc,, +click/__pycache__/types.cpython-36.pyc,, +click/__pycache__/termui.cpython-36.pyc,, +click/__pycache__/formatting.cpython-36.pyc,, +click/__pycache__/decorators.cpython-36.pyc,, +click/__pycache__/core.cpython-36.pyc,, +click/__pycache__/__init__.cpython-36.pyc,, +click/__pycache__/utils.cpython-36.pyc,, +click/__pycache__/exceptions.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/Click-7.0.dist-info/WHEEL b/py3/lib/python3.6/site-packages/Click-7.0.dist-info/WHEEL new file mode 100644 index 0000000..1316c41 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Click-7.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/Click-7.0.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/Click-7.0.dist-info/top_level.txt new file mode 100644 index 0000000..dca9a90 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Click-7.0.dist-info/top_level.txt @@ -0,0 +1 @@ +click diff --git a/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/LICENSE.rst b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/LICENSE.rst new file mode 100644 index 0000000..9d227a0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/METADATA b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/METADATA new file mode 100644 index 0000000..08fcc91 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/METADATA @@ -0,0 +1,134 @@ +Metadata-Version: 2.1 +Name: Flask +Version: 1.1.1 +Summary: A simple framework for building complex web applications. +Home-page: https://palletsprojects.com/p/flask/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Documentation, https://flask.palletsprojects.com/ +Project-URL: Code, https://github.com/pallets/flask +Project-URL: Issue tracker, https://github.com/pallets/flask/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Framework :: Flask +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* +Requires-Dist: Werkzeug (>=0.15) +Requires-Dist: Jinja2 (>=2.10.1) +Requires-Dist: itsdangerous (>=0.24) +Requires-Dist: click (>=5.1) +Provides-Extra: dev +Requires-Dist: pytest ; extra == 'dev' +Requires-Dist: coverage ; extra == 'dev' +Requires-Dist: tox ; extra == 'dev' +Requires-Dist: sphinx ; extra == 'dev' +Requires-Dist: pallets-sphinx-themes ; extra == 'dev' +Requires-Dist: sphinxcontrib-log-cabinet ; extra == 'dev' +Requires-Dist: sphinx-issues ; extra == 'dev' +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: pallets-sphinx-themes ; extra == 'docs' +Requires-Dist: sphinxcontrib-log-cabinet ; extra == 'docs' +Requires-Dist: sphinx-issues ; extra == 'docs' +Provides-Extra: dotenv +Requires-Dist: python-dotenv ; extra == 'dotenv' + +Flask +===== + +Flask is a lightweight `WSGI`_ web application framework. It is designed +to make getting started quick and easy, with the ability to scale up to +complex applications. 
It began as a simple wrapper around `Werkzeug`_ +and `Jinja`_ and has become one of the most popular Python web +application frameworks. + +Flask offers suggestions, but doesn't enforce any dependencies or +project layout. It is up to the developer to choose the tools and +libraries they want to use. There are many extensions provided by the +community that make adding new functionality easy. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U Flask + + +A Simple Example +---------------- + +.. code-block:: python + + from flask import Flask + + app = Flask(__name__) + + @app.route("/") + def hello(): + return "Hello, World!" + +.. code-block:: text + + $ env FLASK_APP=hello.py flask run + * Serving Flask app "hello" + * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) + + +Contributing +------------ + +For guidance on setting up a development environment and how to make a +contribution to Flask, see the `contributing guidelines`_. + +.. _contributing guidelines: https://github.com/pallets/flask/blob/master/CONTRIBUTING.rst + + +Donate +------ + +The Pallets organization develops and supports Flask and the libraries +it uses. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. _please donate today: https://psfmember.org/civicrm/contribute/transact?reset=1&id=20 + + +Links +----- + +* Website: https://palletsprojects.com/p/flask/ +* Documentation: https://flask.palletsprojects.com/ +* Releases: https://pypi.org/project/Flask/ +* Code: https://github.com/pallets/flask +* Issue tracker: https://github.com/pallets/flask/issues +* Test status: https://dev.azure.com/pallets/flask/_build +* Official chat: https://discord.gg/t6rrQZH + +.. _WSGI: https://wsgi.readthedocs.io +.. _Werkzeug: https://www.palletsprojects.com/p/werkzeug/ +.. _Jinja: https://www.palletsprojects.com/p/jinja/ +.. 
_pip: https://pip.pypa.io/en/stable/quickstart/ + + diff --git a/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/RECORD b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/RECORD new file mode 100644 index 0000000..8dc1ded --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/RECORD @@ -0,0 +1,48 @@ +flask/__init__.py,sha256=qaBW4gy9Xxmdc3ygYO0_H214H1VpF7fq8xRR4XbqRjE,1894 +flask/__main__.py,sha256=fjVtt3QTANXlpJCOv3Ha7d5H-76MwzSIOab7SFD9TEk,254 +flask/_compat.py,sha256=8KPT54Iig96TuLipdogLRHNYToIcg-xPhnSV5VRERnw,4099 +flask/app.py,sha256=gLZInxueeQ9dkBo1wrntZ-bZqiDT4rYxy_AQ1xraFDc,98066 +flask/blueprints.py,sha256=vkdm8NusGsfZUeIfPdCluj733QFmiQcT4Sk1tuZLUjw,21400 +flask/cli.py,sha256=_WhPG1bggNdrP0QO95Vex6VJpDqTsVK0z54Y5poljKU,30933 +flask/config.py,sha256=3dejvQRYfNHw_V7dCLMxU8UNFpL34xIKemN7gHZIZ8Y,10052 +flask/ctx.py,sha256=cks-omGedkxawHFo6bKIrdOHsJCAgg1i_NWw_htxb5U,16724 +flask/debughelpers.py,sha256=-whvPKuAoU8AZ9c1z_INuOeBgfYDqE1J2xNBsoriugU,6475 +flask/globals.py,sha256=OgcHb6_NCyX6-TldciOdKcyj4PNfyQwClxdMhvov6aA,1637 +flask/helpers.py,sha256=x2Pa85R5dV6uA5f5423JTb6x4u6ZaMGf8sfosUZ76dQ,43004 +flask/logging.py,sha256=WcY5UkqTysGfmosyygSlXyZYGwOp3y-VsE6ehoJ48dk,3250 +flask/sessions.py,sha256=G0KsEkr_i1LG_wOINwFSOW3ts7Xbv4bNgEZKc7TRloc,14360 +flask/signals.py,sha256=yYLOed2x8WnQ7pirGalQYfpYpCILJ0LJhmNSrnWvjqw,2212 +flask/templating.py,sha256=F8E_IZXn9BGsjMzUJ5N_ACMyZdiFBp_SSEaUunvfZ7g,4939 +flask/testing.py,sha256=b0QaEejx0UcXqfSFP43k5W57bTVeDyrNK3uPD8JUpCk,10146 +flask/views.py,sha256=eeWnadLAj0QdQPLtjKipDetRZyG62CT2y7fNOFDJz0g,5802 +flask/wrappers.py,sha256=kgsvtZuMM6RQaDqhRbc5Pcj9vqTnaERl2pmXcdGL7LU,4736 +flask/json/__init__.py,sha256=6nITbZYiYOPB8Qfi1-dvsblwn01KRz8VOsMBIZyaYek,11988 +flask/json/tag.py,sha256=vq9GOllg_0kTWKuVFrwmkeOQzR-jdBD23x-89JyCCQI,8306 +Flask-1.1.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +Flask-1.1.1.dist-info/METADATA,sha256=Ht4R6TpTKOaXOmmQHhEF3A0Obpzde2Ai0kzNdu6-VWQ,4400 +Flask-1.1.1.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 +Flask-1.1.1.dist-info/entry_points.txt,sha256=gBLA1aKg0OYR8AhbAfg8lnburHtKcgJLDU52BBctN0k,42 +Flask-1.1.1.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6 +Flask-1.1.1.dist-info/RECORD,, +../../../bin/flask,sha256=pdYaa4l5zP9f2RMkhB39oDakhsdhb6eX0yAzDp74UIk,256 +Flask-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +flask/__pycache__/testing.cpython-36.pyc,, +flask/__pycache__/helpers.cpython-36.pyc,, +flask/__pycache__/_compat.cpython-36.pyc,, +flask/__pycache__/blueprints.cpython-36.pyc,, +flask/__pycache__/wrappers.cpython-36.pyc,, +flask/__pycache__/config.cpython-36.pyc,, +flask/__pycache__/globals.cpython-36.pyc,, +flask/__pycache__/views.cpython-36.pyc,, +flask/__pycache__/__main__.cpython-36.pyc,, +flask/__pycache__/ctx.cpython-36.pyc,, +flask/__pycache__/app.cpython-36.pyc,, +flask/__pycache__/sessions.cpython-36.pyc,, +flask/__pycache__/__init__.cpython-36.pyc,, +flask/__pycache__/logging.cpython-36.pyc,, +flask/__pycache__/templating.cpython-36.pyc,, +flask/__pycache__/signals.cpython-36.pyc,, +flask/__pycache__/debughelpers.cpython-36.pyc,, +flask/__pycache__/cli.cpython-36.pyc,, +flask/json/__pycache__/tag.cpython-36.pyc,, +flask/json/__pycache__/__init__.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/WHEEL b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/WHEEL new file mode 100644 index 0000000..78e6f69 --- /dev/null +++ 
b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.4) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/entry_points.txt b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/entry_points.txt new file mode 100644 index 0000000..1eb0252 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +flask = flask.cli:main + diff --git a/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/top_level.txt new file mode 100644 index 0000000..7e10602 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask-1.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +flask diff --git a/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/LICENSE b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/LICENSE new file mode 100644 index 0000000..2448fd2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013 Miguel Grinberg + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/METADATA b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/METADATA new file mode 100644 index 0000000..27ca7a7 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/METADATA @@ -0,0 +1,29 @@ +Metadata-Version: 2.1 +Name: Flask-Migrate +Version: 2.5.2 +Summary: SQLAlchemy database migrations for Flask applications using Alembic +Home-page: http://github.com/miguelgrinberg/flask-migrate/ +Author: Miguel Grinberg +Author-email: miguelgrinberg50@gmail.com +License: MIT +Platform: any +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Dist: Flask (>=0.9) +Requires-Dist: Flask-SQLAlchemy (>=1.0) +Requires-Dist: alembic (>=0.7) + + +Flask-Migrate +-------------- + +SQLAlchemy database migrations for Flask applications using Alembic. + + diff --git a/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/RECORD b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/RECORD new file mode 100644 index 0000000..ace5bc3 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/RECORD @@ -0,0 +1,21 @@ +flask_migrate/__init__.py,sha256=Z8xs6oPoj7bkGQeYBWB-hwaFFfmTfMcwes77O9QmvHg,19308 +flask_migrate/cli.py,sha256=ZdLHsiedXuyqCq-Ey0fLqPSMi8fb1xJm97pWx_f_Ff4,9598 +flask_migrate/templates/flask/README,sha256=MVlc9TYmr57RbhXET6QxgyCcwWP7w-vLkEsirENqiIQ,38 +flask_migrate/templates/flask/alembic.ini.mako,sha256=zQU53x-FQXAbtuOxp3_hgtsEZK8M0Unkw9F_uMSBEDc,770 +flask_migrate/templates/flask/env.py,sha256=NHt0HDV4VDIldgRyTLdpCRam2rcXe7h5Lbsnbndy-Dc,2918 +flask_migrate/templates/flask/script.py.mako,sha256=8_xgA-gm_OhehnO7CiIijWgnm00ZlszEHtIHrAYFJl0,494 +flask_migrate/templates/flask-multidb/README,sha256=MVlc9TYmr57RbhXET6QxgyCcwWP7w-vLkEsirENqiIQ,38 +flask_migrate/templates/flask-multidb/alembic.ini.mako,sha256=zQU53x-FQXAbtuOxp3_hgtsEZK8M0Unkw9F_uMSBEDc,770 +flask_migrate/templates/flask-multidb/env.py,sha256=X_mUc9oCzJIcm_Xk82pWlE3U5kNJTzUhUDrbPTYxWW4,5618 +flask_migrate/templates/flask-multidb/script.py.mako,sha256=lVwJ36kfy6N1gRW7Lepg5EjXQ6Ouar4GTSBHcHXYHbs,965 +Flask_Migrate-2.5.2.dist-info/LICENSE,sha256=kfkXGlJQvKy3Y__6tAJ8ynIp1HQfeROXhL8jZU1d-DI,1082 +Flask_Migrate-2.5.2.dist-info/METADATA,sha256=Ito-s1ua2YuyWOrVQQGnK7OrdJiTmcVqcQt6uaejLTs,946 +Flask_Migrate-2.5.2.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 +Flask_Migrate-2.5.2.dist-info/entry_points.txt,sha256=KIMh5vVHpfcQw9lq5G7y7cVhHgS-0DdbmIS8X7mnrzI,44 +Flask_Migrate-2.5.2.dist-info/top_level.txt,sha256=jLoPgiMG6oR4ugNteXn3IHskVVIyIXVStZOVq-AWLdU,14 +Flask_Migrate-2.5.2.dist-info/RECORD,, +Flask_Migrate-2.5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +flask_migrate/templates/flask/__pycache__/env.cpython-36.pyc,, +flask_migrate/templates/flask-multidb/__pycache__/env.cpython-36.pyc,, +flask_migrate/__pycache__/__init__.cpython-36.pyc,, +flask_migrate/__pycache__/cli.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/WHEEL 
b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/WHEEL new file mode 100644 index 0000000..78e6f69 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.4) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/entry_points.txt b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/entry_points.txt new file mode 100644 index 0000000..f15410e --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[flask.commands] +db = flask_migrate.cli:db + diff --git a/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/top_level.txt new file mode 100644 index 0000000..0652762 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_Migrate-2.5.2.dist-info/top_level.txt @@ -0,0 +1 @@ +flask_migrate diff --git a/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/LICENSE.rst b/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/LICENSE.rst new file mode 100644 index 0000000..9d227a0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/METADATA b/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/METADATA new file mode 100644 index 0000000..8e988ca --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/METADATA @@ -0,0 +1,94 @@ +Metadata-Version: 2.1 +Name: Flask-SQLAlchemy +Version: 2.4.0 +Summary: Adds SQLAlchemy support to your Flask application. +Home-page: https://github.com/pallets/flask-sqlalchemy +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Documentation, https://flask-sqlalchemy.palletsprojects.com/ +Project-URL: Code, https://github.com/pallets/flask-sqlalchemy +Project-URL: Issue tracker, https://github.com/pallets/flask-sqlalchemy/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >= 2.7, != 3.0.*, != 3.1.*, != 3.2.*, != 3.3.* +Requires-Dist: Flask (>=0.10) +Requires-Dist: SQLAlchemy (>=0.8.0) + +Flask-SQLAlchemy +================ + +Flask-SQLAlchemy is an extension for `Flask`_ that adds support for +`SQLAlchemy`_ to your application. It aims to simplify using SQLAlchemy +with Flask by providing useful defaults and extra helpers that make it +easier to accomplish common tasks. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U Flask-SQLAlchemy + + +A Simple Example +---------------- + +.. code-block:: python + + from flask import Flask + from flask_sqlalchemy import SQLAlchemy + + app = Flask(__name__) + app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///example.sqlite" + db = SQLAlchemy(app) + + + class User(db.Model): + id = db.Column(db.Integer, primary_key=True) + username = db.Column(db.String, unique=True, nullable=False) + email = db.Column(db.String, unique=True, nullable=False) + + + db.session.add(User(name="Flask", email="example@example.com")) + db.session.commit() + + users = User.query.all() + + +Links +----- + +- Documentation: https://flask-sqlalchemy.palletsprojects.com/ +- Releases: https://pypi.org/project/Flask-SQLAlchemy/ +- Code: https://github.com/pallets/flask-sqlalchemy +- Issue tracker: https://github.com/pallets/flask-sqlalchemy/issues +- Test status: https://travis-ci.org/pallets/flask-sqlalchemy +- Test coverage: https://codecov.io/gh/pallets/flask-sqlalchemy + +.. _Flask: https://palletsprojects.com/p/flask/ +.. _SQLAlchemy: https://www.sqlalchemy.org +.. 
_pip: https://pip.pypa.io/en/stable/quickstart/ + + diff --git a/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/RECORD b/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/RECORD new file mode 100644 index 0000000..7925d1d --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/RECORD @@ -0,0 +1,14 @@ +flask_sqlalchemy/__init__.py,sha256=8H9-MlSbJjS69CBV8UYNYpDn3EkkU4S3wc7JouFz4eg,39027 +flask_sqlalchemy/_compat.py,sha256=yua0ZSgVWwi56QpEgwaPInzkNQ9PFb7YQdvEk3dImXo,821 +flask_sqlalchemy/model.py,sha256=7CTvGxxKmLscwcwq9mVT5ny_w301QZvTVjSqMoMx6DI,4974 +flask_sqlalchemy/utils.py,sha256=4eHqAbYElnJ3NbSAHhuINckoAHDABoxjleMJD0iKgyg,1390 +Flask_SQLAlchemy-2.4.0.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +Flask_SQLAlchemy-2.4.0.dist-info/METADATA,sha256=uvOSlKcGx935ng3QL57DdBnd612KPhCt5dhDX0TBizI,3128 +Flask_SQLAlchemy-2.4.0.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 +Flask_SQLAlchemy-2.4.0.dist-info/top_level.txt,sha256=w2K4fNNoTh4HItoFfz2FRQShSeLcvHYrzU_sZov21QU,17 +Flask_SQLAlchemy-2.4.0.dist-info/RECORD,, +Flask_SQLAlchemy-2.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +flask_sqlalchemy/__pycache__/_compat.cpython-36.pyc,, +flask_sqlalchemy/__pycache__/model.cpython-36.pyc,, +flask_sqlalchemy/__pycache__/__init__.cpython-36.pyc,, +flask_sqlalchemy/__pycache__/utils.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/WHEEL b/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/WHEEL new file mode 100644 index 0000000..c8240f0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/top_level.txt new file mode 100644 index 0000000..8a5538e --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_SQLAlchemy-2.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +flask_sqlalchemy diff --git a/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/DESCRIPTION.rst b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..7d7eef7 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/DESCRIPTION.rst @@ -0,0 +1,21 @@ +Flask-WTF +========= + +.. image:: https://travis-ci.org/lepture/flask-wtf.svg?branch=master + :target: https://travis-ci.org/lepture/flask-wtf + :alt: Travis CI Status +.. image:: https://coveralls.io/repos/lepture/flask-wtf/badge.svg?branch=master + :target: https://coveralls.io/r/lepture/flask-wtf + :alt: Coverage Status + +Simple integration of Flask and WTForms, including CSRF, file upload, +and reCAPTCHA. 
+ +Links +----- + +* `Documentation `_ +* `PyPI `_ +* `GitHub `_ + + diff --git a/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/LICENSE.txt b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/LICENSE.txt new file mode 100644 index 0000000..5cbad1a --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/LICENSE.txt @@ -0,0 +1,32 @@ +Copyright (c) 2010 by Dan Jacob. +Copyright (c) 2013 by Hsiaoming Yang. + +Some rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + +* The names of the contributors may not be used to endorse or + promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/METADATA b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/METADATA new file mode 100644 index 0000000..8dd02a4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/METADATA @@ -0,0 +1,52 @@ +Metadata-Version: 2.0 +Name: Flask-WTF +Version: 0.14.2 +Summary: Simple integration of Flask and WTForms. 
+Home-page: https://github.com/lepture/flask-wtf +Author: Hsiaoming Yang +Author-email: me@lepture.com +License: BSD +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Framework :: Flask +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Dist: Flask +Requires-Dist: WTForms + +Flask-WTF +========= + +.. image:: https://travis-ci.org/lepture/flask-wtf.svg?branch=master + :target: https://travis-ci.org/lepture/flask-wtf + :alt: Travis CI Status +.. image:: https://coveralls.io/repos/lepture/flask-wtf/badge.svg?branch=master + :target: https://coveralls.io/r/lepture/flask-wtf + :alt: Coverage Status + +Simple integration of Flask and WTForms, including CSRF, file upload, +and reCAPTCHA. + +Links +----- + +* `Documentation `_ +* `PyPI `_ +* `GitHub `_ + + diff --git a/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/RECORD b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/RECORD new file mode 100644 index 0000000..b0122b0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/RECORD @@ -0,0 +1,30 @@ +Flask_WTF-0.14.2.dist-info/DESCRIPTION.rst,sha256=vyJWnOD4vgnZ6x2ERr5EH1l2uzLxXCBhr_O1L6Ell2E,584 +Flask_WTF-0.14.2.dist-info/LICENSE.txt,sha256=oHX42YrP2wXdmHFiQrniwbOrmHIpJrPEz2yRasFOg1A,1490 +Flask_WTF-0.14.2.dist-info/METADATA,sha256=M8ZfImxUciRZ5Av5r1x37JnEC3wG5sacQv346wmldHU,1846 +Flask_WTF-0.14.2.dist-info/RECORD,, +Flask_WTF-0.14.2.dist-info/WHEEL,sha256=5wvfB7GvgZAbKBSE9uX9Zbi6LCL-_KgezgHblXhCRnM,113 +Flask_WTF-0.14.2.dist-info/metadata.json,sha256=qGwhg5DSr2WilK8cvCcQsdrtDJ5NFgR1faLrO8YZCAY,1370 +Flask_WTF-0.14.2.dist-info/top_level.txt,sha256=zK3flQPSjYTkAMjB0V6Jhu3jyotC0biL1mMhzitYoog,10 +flask_wtf/__init__.py,sha256=zNLRzvfi7PLTc7jkqQT7pzgtsw9_9eN7BfO4fzwKxJc,406 +flask_wtf/_compat.py,sha256=4h1U_W5vbM9L8sJ4ZPFevuneM1TirnBTTVrsHRH3uUE,849 +flask_wtf/csrf.py,sha256=suKAZarzLIBuiJFqwP--RldEYabPj0DGfYkQA32Cc1E,11554 +flask_wtf/file.py,sha256=2UnODjSq47IjsFQMiu_z218vFA5pnQ9nL1FpX7hpK1M,2971 +flask_wtf/form.py,sha256=lpx-ItUnKjYOW8VxQpBAlbhoROJNd2PHi3v0loPPyYI,4948 +flask_wtf/html5.py,sha256=ReZHJto8DAZkO3BxUDdHnkyz5mM21KtqKYh0achJ5IM,372 +flask_wtf/i18n.py,sha256=xMB_jHCOaWfF1RXm7E6hsRHwPsUyVyKX2Rhy3tBOUgk,1790 +flask_wtf/recaptcha/__init__.py,sha256=q3TC7tZPSAZ3On3GApZKGn0EcydX4zprisbyTlhN3sQ,86 +flask_wtf/recaptcha/fields.py,sha256=kN_10iZYQcYg1EtxFp4B87BlFnnrJCktrh7bTykOVj4,453 +flask_wtf/recaptcha/validators.py,sha256=8UgjA72OxUyHVk_lm8-fGhPEvKgkMtsoFNt7yzjo0xw,2398 +flask_wtf/recaptcha/widgets.py,sha256=me-oaqMNPW2BLujNTuDHCXWcVhh6eI7wlm6_TIrIF_U,1267 +Flask_WTF-0.14.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 
+flask_wtf/recaptcha/__pycache__/__init__.cpython-36.pyc,, +flask_wtf/recaptcha/__pycache__/fields.cpython-36.pyc,, +flask_wtf/recaptcha/__pycache__/validators.cpython-36.pyc,, +flask_wtf/recaptcha/__pycache__/widgets.cpython-36.pyc,, +flask_wtf/__pycache__/_compat.cpython-36.pyc,, +flask_wtf/__pycache__/form.cpython-36.pyc,, +flask_wtf/__pycache__/i18n.cpython-36.pyc,, +flask_wtf/__pycache__/html5.cpython-36.pyc,, +flask_wtf/__pycache__/file.cpython-36.pyc,, +flask_wtf/__pycache__/csrf.cpython-36.pyc,, +flask_wtf/__pycache__/__init__.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/WHEEL b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/WHEEL new file mode 100644 index 0000000..7bf9daa --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0.a0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/metadata.json b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/metadata.json new file mode 100644 index 0000000..d48bac6 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Framework :: Flask", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules"], "extensions": {"python.details": {"contacts": [{"email": "me@lepture.com", "name": "Hsiaoming Yang", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://github.com/lepture/flask-wtf"}}}, "extras": [], "generator": "bdist_wheel (0.30.0.a0)", "license": "BSD", "metadata_version": "2.0", "name": "Flask-WTF", "platform": "any", "run_requires": [{"requires": ["Flask", "WTForms"]}], "summary": "Simple integration of Flask and WTForms.", "version": "0.14.2"} \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/top_level.txt new file mode 100644 index 0000000..716f422 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Flask_WTF-0.14.2.dist-info/top_level.txt @@ -0,0 +1 @@ +flask_wtf diff --git a/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/LICENSE b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/LICENSE new file mode 100644 index 0000000..8a5dad9 --- /dev/null +++ 
b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/LICENSE @@ -0,0 +1,22 @@ +Copyright P G Jones 2018. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/METADATA b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/METADATA new file mode 100644 index 0000000..ee09fc4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/METADATA @@ -0,0 +1,122 @@ +Metadata-Version: 2.1 +Name: Hypercorn +Version: 0.5.4 +Summary: A ASGI Server based on Hyper libraries and inspired by Gunicorn. +Home-page: https://gitlab.com/pgjones/hypercorn/ +Author: P G Jones +Author-email: philip.graham.jones@googlemail.com +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.6.1 +Requires-Dist: h11 +Requires-Dist: h2 (>=3.1.0) +Requires-Dist: pytoml +Requires-Dist: typing-extensions +Requires-Dist: wsproto (>=0.14.0) +Provides-Extra: trio +Requires-Dist: trio (>=0.9.0) ; extra == 'trio' +Provides-Extra: uvloop +Requires-Dist: uvloop ; extra == 'uvloop' + +Hypercorn +========= + +.. image:: https://assets.gitlab-static.net/pgjones/hypercorn/raw/master/artwork/logo.png + :alt: Hypercorn logo + +|Build Status| |docs| |pypi| |http| |python| |license| + +Hypercorn is an `ASGI +`_ web +server based on the sans-io hyper, `h11 +`_, `h2 +`_, and `wsproto +`_ libraries and inspired by +Gunicorn. Hypercorn supports HTTP/1, HTTP/2, and WebSockets (over +HTTP/1 and HTTP/2) and the ASGI 2 specification. Hypercorn can utilise +asyncio, uvloop, or trio worker types. + +Hypercorn was initially part of `Quart +`_ before being separated out into a +standalone ASGI server. Hypercorn forked from version 0.5.0 of Quart. + +Quickstart +---------- + +Hypercorn can be installed via `pipenv +`_ or +`pip `_, + +.. code-block:: console + + $ pipenv install hypercorn + $ pip install hypercorn + +and requires Python 3.6.1 or higher. 
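For context alongside the Hypercorn quickstart above: a minimal sketch of an ASGI 2.0 application that Hypercorn 0.5.4 could serve. The module and object names (``asgi_app.py``, ``App``) are illustrative only and are not part of this repository.

.. code-block:: python

    # asgi_app.py -- minimal ASGI 2.0 "double callable" app (illustrative sketch only)
    class App:
        def __init__(self, scope):
            # scope describes the incoming connection (type, path, headers, ...)
            self.scope = scope

        async def __call__(self, receive, send):
            # reply to every HTTP request with a plain-text 200 response
            await send({
                'type': 'http.response.start',
                'status': 200,
                'headers': [(b'content-type', b'text/plain')],
            })
            await send({'type': 'http.response.body', 'body': b'hello'})

    app = App

An app like this would then be served from the command line as described in the following paragraph, e.g. ``hypercorn asgi_app:app``.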
+ +With hypercorn installed ASGI frameworks (or apps) can be served via +Hypercorn via the command line, + +.. code-block:: console + + $ hypercorn module:app + +Contributing +------------ + +Hypercorn is developed on `GitLab +`_. You are very welcome to open +`issues `_ or propose +`merge requests +`_. + +Testing +~~~~~~~ + +The best way to test Hypercorn is with Tox, + +.. code-block:: console + + $ pipenv install tox + $ tox + +this will check the code style and run the tests. + +Help +---- + +The Hypercorn `documentation `_ +is the best place to start, after that try opening an `issue +`_. + + +.. |Build Status| image:: https://gitlab.com/pgjones/hypercorn/badges/master/build.svg + :target: https://gitlab.com/pgjones/hypercorn/commits/master + +.. |docs| image:: https://img.shields.io/badge/docs-passing-brightgreen.svg + :target: https://pgjones.gitlab.io/hypercorn/ + +.. |pypi| image:: https://img.shields.io/pypi/v/hypercorn.svg + :target: https://pypi.python.org/pypi/Hypercorn/ + +.. |http| image:: https://img.shields.io/badge/http-1.0,1.1,2-orange.svg + :target: https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol + +.. |python| image:: https://img.shields.io/pypi/pyversions/hypercorn.svg + :target: https://pypi.python.org/pypi/Hypercorn/ + +.. |license| image:: https://img.shields.io/badge/license-MIT-blue.svg + :target: https://gitlab.com/pgjones/hypercorn/blob/master/LICENSE + + diff --git a/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/RECORD b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/RECORD new file mode 100644 index 0000000..5ac7a81 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/RECORD @@ -0,0 +1,65 @@ +hypercorn/__about__.py,sha256=DITpct-LrdIsTgwx2NgH5Ghx5y8Xgz1YMimy1ZV5RTY,22 +hypercorn/__init__.py,sha256=OzD1N4XyVugGBNvMn-4YVDweLKZZNusHjcuiWlE-D3g,149 +hypercorn/__main__.py,sha256=atMbWGzqq7tHyEDPb-cB5aikH25nW1UYNxLvxNVG__Y,6090 +hypercorn/config.py,sha256=vbFOcEvw35KKVWBRDulTqfVaLBCDcGrO_F5EDOPHX_Y,10878 +hypercorn/logging.py,sha256=mRXtzLKpdHlXCjjYbXtDHc-qQmm_5HO040zvnRaV_7U,3080 +hypercorn/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 +hypercorn/run.py,sha256=RKs66PWt9QJFnXdtnwKG3F5NXsVHOh_JA-q0R70AAfg,2169 +hypercorn/typing.py,sha256=ioycndhELt90ukx4KdPqfjdvsFeX_l9PYgvK6Y3Y880,1582 +hypercorn/utils.py,sha256=Qn7c3svG_m6sGSoZ4UM-hQ_dDgxvrgHKNEWmVhHGUFE,4107 +hypercorn/asgi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +hypercorn/asgi/h11.py,sha256=6GTpU7c840ejxYiPn4jr33GCLPwYEAbDRFlMdA1yXHk,5991 +hypercorn/asgi/h2.py,sha256=-stjtOtV116gBU6wAtxNLx79gH2TWmXRrCZiCc-NQ1I,12254 +hypercorn/asgi/run.py,sha256=nnyM2FjSKgaYLoFGwBjZ_7A1MOYEapBuDlt1vLUCKSY,374 +hypercorn/asgi/utils.py,sha256=p458ox4XpvAeVCsEfj7y7qoA4BGOiGJVnGX2Mbe6_pg,2253 +hypercorn/asgi/wsproto.py,sha256=yA8C-8hnPpWvBnzG9oZsSkS0oTB9Q-o5NJ5_oOHT3s4,6780 +hypercorn/asyncio/__init__.py,sha256=iDD5pw0FJRaolEnPCR8pAtR_QxwV0jK3Odw5id7WLdQ,992 +hypercorn/asyncio/base.py,sha256=mJ05MB4o-o-elKKwhJCzfQcHz1vVCSlggpChwtgJC7k,2624 +hypercorn/asyncio/h11.py,sha256=A9hXNYijHMQqeUV8FCjqwjd70y3UDF8pio6N5ReQ6_s,4667 +hypercorn/asyncio/h2.py,sha256=4BnNGer4P4LF_OFUlyCNJYWUfSrgRRtb1snbJRlF6DQ,13498 +hypercorn/asyncio/lifespan.py,sha256=k6A_NKCDgpqFPyt5d2j3i5OxwQEvJdYznC4d3kD5_eU,2586 +hypercorn/asyncio/run.py,sha256=4bftBV-cYAQl57Z00HuFxcokyeHij4U0HX7vos_KwXA,8455 +hypercorn/asyncio/wsproto.py,sha256=SMyIz415vLzqsoqL2HgvDnAzkFz-arGEZUA9j-agRrY,3883 +hypercorn/trio/__init__.py,sha256=SzUj79gSY3KI7GtBPVdS2ftI50SYvrlXDcaPYUbjUuQ,1127 
+hypercorn/trio/base.py,sha256=c1GifLedS9VD7Rxr-Sb2dV4iCGzp_R0S8i6yEiX2IMU,1008 +hypercorn/trio/h11.py,sha256=cttF-Crx8Dclj9sGgI8qrcFzZ87_U-LJR4q7sELU1Ps,5258 +hypercorn/trio/h2.py,sha256=IV1hp_3UCRtskWjIktHKp--3mjQtJFWyA3yXrVb3Ug8,13893 +hypercorn/trio/lifespan.py,sha256=KTd_rbp8FEc5pyRzIkoElTBlVnbKYNeAf3H-MP-ctm0,2339 +hypercorn/trio/run.py,sha256=XtgtyGY9jy6-zO9H_nbwXQXUPhwscw_fEwWeA-HT8yg,3922 +hypercorn/trio/wsproto.py,sha256=QzOk1D90tLjtSlVo_NALitmYyocoxOoA2k5Ma5VYAH4,3987 +Hypercorn-0.5.4.dist-info/LICENSE,sha256=HjGdhzPYNEZ3zQ7mwqjMBgHHRId5HViyayt-8GMMK38,1050 +Hypercorn-0.5.4.dist-info/METADATA,sha256=10FBuDyLRVZBIzU6ycHub3Fti8wgC6-7w0yCXFT-B_M,3900 +Hypercorn-0.5.4.dist-info/WHEEL,sha256=_NOXIqFgOaYmlm9RJLPQZ13BJuEIrp5jx5ptRD5uh3Y,92 +Hypercorn-0.5.4.dist-info/entry_points.txt,sha256=HV9e4tNKTeIJbe1PIswRWGn4jbCUQqfIbcAIV68Znuk,55 +Hypercorn-0.5.4.dist-info/top_level.txt,sha256=zSgQlh13xK80SFqsUPaG5-83qTJOzqYTBfP9XFcvB_Y,10 +Hypercorn-0.5.4.dist-info/RECORD,, +../../../bin/hypercorn,sha256=o7LN5QkiQLnpDR8qjX6Oa48zvOutA54RoefUgySWQpY,265 +Hypercorn-0.5.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +hypercorn/asyncio/__pycache__/h11.cpython-36.pyc,, +hypercorn/asyncio/__pycache__/h2.cpython-36.pyc,, +hypercorn/asyncio/__pycache__/base.cpython-36.pyc,, +hypercorn/asyncio/__pycache__/lifespan.cpython-36.pyc,, +hypercorn/asyncio/__pycache__/__init__.cpython-36.pyc,, +hypercorn/asyncio/__pycache__/wsproto.cpython-36.pyc,, +hypercorn/asyncio/__pycache__/run.cpython-36.pyc,, +hypercorn/__pycache__/config.cpython-36.pyc,, +hypercorn/__pycache__/__about__.cpython-36.pyc,, +hypercorn/__pycache__/__main__.cpython-36.pyc,, +hypercorn/__pycache__/typing.cpython-36.pyc,, +hypercorn/__pycache__/__init__.cpython-36.pyc,, +hypercorn/__pycache__/logging.cpython-36.pyc,, +hypercorn/__pycache__/run.cpython-36.pyc,, +hypercorn/__pycache__/utils.cpython-36.pyc,, +hypercorn/trio/__pycache__/h11.cpython-36.pyc,, +hypercorn/trio/__pycache__/h2.cpython-36.pyc,, +hypercorn/trio/__pycache__/base.cpython-36.pyc,, +hypercorn/trio/__pycache__/lifespan.cpython-36.pyc,, +hypercorn/trio/__pycache__/__init__.cpython-36.pyc,, +hypercorn/trio/__pycache__/wsproto.cpython-36.pyc,, +hypercorn/trio/__pycache__/run.cpython-36.pyc,, +hypercorn/asgi/__pycache__/h11.cpython-36.pyc,, +hypercorn/asgi/__pycache__/h2.cpython-36.pyc,, +hypercorn/asgi/__pycache__/__init__.cpython-36.pyc,, +hypercorn/asgi/__pycache__/wsproto.cpython-36.pyc,, +hypercorn/asgi/__pycache__/run.cpython-36.pyc,, +hypercorn/asgi/__pycache__/utils.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/WHEEL b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/WHEEL new file mode 100644 index 0000000..4eeaea1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.3) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/entry_points.txt b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/entry_points.txt new file mode 100644 index 0000000..d9c53b5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +hypercorn = hypercorn.__main__:main + diff --git a/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/top_level.txt new file mode 100644 index 0000000..083a5e9 --- /dev/null +++ 
b/py3/lib/python3.6/site-packages/Hypercorn-0.5.4.dist-info/top_level.txt @@ -0,0 +1 @@ +hypercorn diff --git a/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/LICENSE b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/LICENSE new file mode 100644 index 0000000..10145a2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/LICENSE @@ -0,0 +1,31 @@ +Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details. + +Some rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * The names of the contributors may not be used to endorse or + promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/METADATA b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/METADATA new file mode 100644 index 0000000..fb4a867 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/METADATA @@ -0,0 +1,67 @@ +Metadata-Version: 2.1 +Name: Jinja2 +Version: 2.10.1 +Summary: A small but fast and easy to use stand-alone template engine written in pure python. 
+Home-page: http://jinja.pocoo.org/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +License: BSD +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Dist: MarkupSafe (>=0.23) +Provides-Extra: i18n +Requires-Dist: Babel (>=0.8) ; extra == 'i18n' + + +Jinja2 +~~~~~~ + +Jinja2 is a template engine written in pure Python. It provides a +`Django`_ inspired non-XML syntax but supports inline expressions and +an optional `sandboxed`_ environment. + +Nutshell +-------- + +Here a small example of a Jinja template:: + + {% extends 'base.html' %} + {% block title %}Memberlist{% endblock %} + {% block content %} + + {% endblock %} + +Philosophy +---------- + +Application logic is for the controller but don't try to make the life +for the template designer too hard by giving him too few functionality. + +For more informations visit the new `Jinja2 webpage`_ and `documentation`_. + +.. _sandboxed: https://en.wikipedia.org/wiki/Sandbox_(computer_security) +.. _Django: https://www.djangoproject.com/ +.. _Jinja2 webpage: http://jinja.pocoo.org/ +.. 
_documentation: http://jinja.pocoo.org/2/documentation/ + + diff --git a/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/RECORD b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/RECORD new file mode 100644 index 0000000..5cbdd18 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/RECORD @@ -0,0 +1,61 @@ +jinja2/__init__.py,sha256=V1D-JHQKklZseXOMA-uAW7-BeKe_TfPpOFi9-dV04ZA,2616 +jinja2/_compat.py,sha256=xP60CE5Qr8FTYcDE1f54tbZLKGvMwYml4-8T7Q4KG9k,2596 +jinja2/_identifier.py,sha256=W1QBSY-iJsyt6oR_nKSuNNCzV95vLIOYgUNPUI1d5gU,1726 +jinja2/asyncfilters.py,sha256=cTDPvrS8Hp_IkwsZ1m9af_lr5nHysw7uTa5gV0NmZVE,4144 +jinja2/asyncsupport.py,sha256=UErQ3YlTLaSjFb94P4MVn08-aVD9jJxty2JVfMRb-1M,7878 +jinja2/bccache.py,sha256=nQldx0ZRYANMyfvOihRoYFKSlUdd5vJkS7BjxNwlOZM,12794 +jinja2/compiler.py,sha256=BqC5U6JxObSRhblyT_a6Tp5GtEU5z3US1a4jLQaxxgo,65386 +jinja2/constants.py,sha256=uwwV8ZUhHhacAuz5PTwckfsbqBaqM7aKfyJL7kGX5YQ,1626 +jinja2/debug.py,sha256=WTVeUFGUa4v6ReCsYv-iVPa3pkNB75OinJt3PfxNdXs,12045 +jinja2/defaults.py,sha256=Em-95hmsJxIenDCZFB1YSvf9CNhe9rBmytN3yUrBcWA,1400 +jinja2/environment.py,sha256=VnkAkqw8JbjZct4tAyHlpBrka2vqB-Z58RAP-32P1ZY,50849 +jinja2/exceptions.py,sha256=_Rj-NVi98Q6AiEjYQOsP8dEIdu5AlmRHzcSNOPdWix4,4428 +jinja2/ext.py,sha256=atMQydEC86tN1zUsdQiHw5L5cF62nDbqGue25Yiu3N4,24500 +jinja2/filters.py,sha256=yOAJk0MsH-_gEC0i0U6NweVQhbtYaC-uE8xswHFLF4w,36528 +jinja2/idtracking.py,sha256=2GbDSzIvGArEBGLkovLkqEfmYxmWsEf8c3QZwM4uNsw,9197 +jinja2/lexer.py,sha256=ySEPoXd1g7wRjsuw23uimS6nkGN5aqrYwcOKxCaVMBQ,28559 +jinja2/loaders.py,sha256=xiTuURKAEObyym0nU8PCIXu_Qp8fn0AJ5oIADUUm-5Q,17382 +jinja2/meta.py,sha256=fmKHxkmZYAOm9QyWWy8EMd6eefAIh234rkBMW2X4ZR8,4340 +jinja2/nativetypes.py,sha256=_sJhS8f-8Q0QMIC0dm1YEdLyxEyoO-kch8qOL5xUDfE,7308 +jinja2/nodes.py,sha256=L10L_nQDfubLhO3XjpF9qz46FSh2clL-3e49ogVlMmA,30853 +jinja2/optimizer.py,sha256=MsdlFACJ0FRdPtjmCAdt7JQ9SGrXFaDNUaslsWQaG3M,1722 +jinja2/parser.py,sha256=lPzTEbcpTRBLw8ii6OYyExHeAhaZLMA05Hpv4ll3ULk,35875 +jinja2/runtime.py,sha256=DHdD38Pq8gj7uWQC5usJyWFoNWL317A9AvXOW_CLB34,27755 +jinja2/sandbox.py,sha256=UmX8hVjdaERCbA3RXBwrV1f-beA23KmijG5kzPJyU4A,17106 +jinja2/tests.py,sha256=iJQLwbapZr-EKquTG_fVOVdwHUUKf3SX9eNkjQDF8oU,4237 +jinja2/utils.py,sha256=q24VupGZotQ-uOyrJxCaXtDWhZC1RgsQG7kcdmjck2Q,20629 +jinja2/visitor.py,sha256=JD1H1cANA29JcntFfN5fPyqQxB4bI4wC00BzZa-XHks,3316 +Jinja2-2.10.1.dist-info/LICENSE,sha256=JvzUNv3Io51EiWrAPm8d_SXjhJnEjyDYvB3Tvwqqils,1554 +Jinja2-2.10.1.dist-info/METADATA,sha256=rx0eN8lX8iq8-YVppmCzV1Qx4y3Pj9IWi08mXUCrewI,2227 +Jinja2-2.10.1.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 +Jinja2-2.10.1.dist-info/entry_points.txt,sha256=NdzVcOrqyNyKDxD09aERj__3bFx2paZhizFDsKmVhiA,72 +Jinja2-2.10.1.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 +Jinja2-2.10.1.dist-info/RECORD,, +Jinja2-2.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +jinja2/__pycache__/visitor.cpython-36.pyc,, +jinja2/__pycache__/nativetypes.cpython-36.pyc,, +jinja2/__pycache__/constants.cpython-36.pyc,, +jinja2/__pycache__/defaults.cpython-36.pyc,, +jinja2/__pycache__/lexer.cpython-36.pyc,, +jinja2/__pycache__/_compat.cpython-36.pyc,, +jinja2/__pycache__/environment.cpython-36.pyc,, +jinja2/__pycache__/ext.cpython-36.pyc,, +jinja2/__pycache__/tests.cpython-36.pyc,, +jinja2/__pycache__/idtracking.cpython-36.pyc,, +jinja2/__pycache__/nodes.cpython-36.pyc,, +jinja2/__pycache__/asyncfilters.cpython-36.pyc,, 
+jinja2/__pycache__/asyncsupport.cpython-36.pyc,, +jinja2/__pycache__/optimizer.cpython-36.pyc,, +jinja2/__pycache__/runtime.cpython-36.pyc,, +jinja2/__pycache__/meta.cpython-36.pyc,, +jinja2/__pycache__/parser.cpython-36.pyc,, +jinja2/__pycache__/loaders.cpython-36.pyc,, +jinja2/__pycache__/filters.cpython-36.pyc,, +jinja2/__pycache__/compiler.cpython-36.pyc,, +jinja2/__pycache__/__init__.cpython-36.pyc,, +jinja2/__pycache__/sandbox.cpython-36.pyc,, +jinja2/__pycache__/bccache.cpython-36.pyc,, +jinja2/__pycache__/utils.cpython-36.pyc,, +jinja2/__pycache__/_identifier.cpython-36.pyc,, +jinja2/__pycache__/debug.cpython-36.pyc,, +jinja2/__pycache__/exceptions.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/WHEEL b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/WHEEL new file mode 100644 index 0000000..c8240f0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/entry_points.txt b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/entry_points.txt new file mode 100644 index 0000000..32e6b75 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/entry_points.txt @@ -0,0 +1,4 @@ + + [babel.extractors] + jinja2 = jinja2.ext:babel_extract[i18n] + \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/top_level.txt new file mode 100644 index 0000000..7f7afbf --- /dev/null +++ b/py3/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/top_level.txt @@ -0,0 +1 @@ +jinja2 diff --git a/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/PKG-INFO b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/PKG-INFO new file mode 100644 index 0000000..d409ca0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/PKG-INFO @@ -0,0 +1,75 @@ +Metadata-Version: 1.2 +Name: Mako +Version: 1.1.0 +Summary: A super-fast templating language that borrows the best ideas from the existing templating languages. +Home-page: https://www.makotemplates.org/ +Author: Mike Bayer +Author-email: mike@zzzcomputing.com +License: MIT +Project-URL: Documentation, https://docs.makotemplates.org +Project-URL: Issue Tracker, https://github.com/sqlalchemy/mako +Description: ========================= + Mako Templates for Python + ========================= + + Mako is a template library written in Python. It provides a familiar, non-XML + syntax which compiles into Python modules for maximum performance. Mako's + syntax and API borrows from the best ideas of many others, including Django + templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded + Python (i.e. Python Server Page) language, which refines the familiar ideas + of componentized layout and inheritance to produce one of the most + straightforward and flexible models available, while also maintaining close + ties to Python calling and scoping semantics. + + Nutshell + ======== + + :: + + <%inherit file="base.html"/> + <% + rows = [[v for v in range(0,10)] for row in range(0,10)] + %> + + % for row in rows: + ${makerow(row)} + % endfor +
+ + <%def name="makerow(row)"> + + % for name in row: + ${name}\ + % endfor + + + + Philosophy + =========== + + Python is a great scripting language. Don't reinvent the wheel...your templates can handle it ! + + Documentation + ============== + + See documentation for Mako at https://docs.makotemplates.org/en/latest/ + + License + ======== + + Mako is licensed under an MIT-style license (see LICENSE). + Other incorporated projects may be licensed under different licenses. + All licenses allow for non-commercial and commercial use. + +Keywords: templates +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* diff --git a/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/SOURCES.txt b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/SOURCES.txt new file mode 100644 index 0000000..4e41159 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/SOURCES.txt @@ -0,0 +1,161 @@ +AUTHORS +CHANGES +LICENSE +MANIFEST.in +README.rst +setup.cfg +setup.py +tox.ini +Mako.egg-info/PKG-INFO +Mako.egg-info/SOURCES.txt +Mako.egg-info/dependency_links.txt +Mako.egg-info/entry_points.txt +Mako.egg-info/not-zip-safe +Mako.egg-info/requires.txt +Mako.egg-info/top_level.txt +doc/caching.html +doc/changelog.html +doc/defs.html +doc/filtering.html +doc/genindex.html +doc/index.html +doc/inheritance.html +doc/namespaces.html +doc/runtime.html +doc/search.html +doc/searchindex.js +doc/syntax.html +doc/unicode.html +doc/usage.html +doc/_static/basic.css +doc/_static/changelog.css +doc/_static/docs.css +doc/_static/doctools.js +doc/_static/documentation_options.js +doc/_static/file.png +doc/_static/jquery-3.2.1.js +doc/_static/jquery.js +doc/_static/language_data.js +doc/_static/minus.png +doc/_static/plus.png +doc/_static/pygments.css +doc/_static/searchtools.js +doc/_static/sphinx_paramlinks.css +doc/_static/underscore-1.3.1.js +doc/_static/underscore.js +doc/build/Makefile +doc/build/caching.rst +doc/build/changelog.rst +doc/build/conf.py +doc/build/defs.rst +doc/build/filtering.rst +doc/build/index.rst +doc/build/inheritance.rst +doc/build/namespaces.rst +doc/build/requirements.txt +doc/build/runtime.rst +doc/build/syntax.rst +doc/build/unicode.rst +doc/build/usage.rst +doc/build/unreleased/README.txt +examples/bench/basic.py +examples/bench/cheetah/footer.tmpl +examples/bench/cheetah/header.tmpl +examples/bench/cheetah/template.tmpl +examples/bench/django/templatetags/__init__.py +examples/bench/django/templatetags/bench.py +examples/bench/kid/base.kid +examples/bench/kid/template.kid +examples/bench/myghty/base.myt +examples/bench/myghty/template.myt +examples/wsgi/run_wsgi.py +mako/__init__.py +mako/_ast_util.py +mako/ast.py +mako/cache.py +mako/cmd.py +mako/codegen.py +mako/compat.py +mako/exceptions.py +mako/filters.py +mako/lexer.py +mako/lookup.py +mako/parsetree.py +mako/pygen.py +mako/pyparser.py +mako/runtime.py +mako/template.py +mako/util.py +mako/ext/__init__.py +mako/ext/autohandler.py +mako/ext/babelplugin.py +mako/ext/beaker_cache.py +mako/ext/extract.py 
+mako/ext/linguaplugin.py +mako/ext/preprocessors.py +mako/ext/pygmentplugin.py +mako/ext/turbogears.py +test/__init__.py +test/sample_module_namespace.py +test/test_ast.py +test/test_block.py +test/test_cache.py +test/test_call.py +test/test_cmd.py +test/test_decorators.py +test/test_def.py +test/test_exceptions.py +test/test_filters.py +test/test_inheritance.py +test/test_lexer.py +test/test_lookup.py +test/test_loop.py +test/test_lru.py +test/test_namespace.py +test/test_pygen.py +test/test_runtime.py +test/test_template.py +test/test_tgplugin.py +test/test_util.py +test/util.py +test/ext/__init__.py +test/ext/test_babelplugin.py +test/ext/test_linguaplugin.py +test/foo/__init__.py +test/foo/mod_no_encoding.py +test/foo/test_ns.py +test/templates/badbom.html +test/templates/bom.html +test/templates/bommagic.html +test/templates/chs_unicode.html +test/templates/chs_unicode_py3k.html +test/templates/chs_utf8.html +test/templates/cmd_good.mako +test/templates/cmd_runtime.mako +test/templates/cmd_syntax.mako +test/templates/crlf.html +test/templates/gettext.mako +test/templates/gettext_cp1251.mako +test/templates/gettext_utf8.mako +test/templates/index.html +test/templates/internationalization.html +test/templates/modtest.html +test/templates/read_unicode.html +test/templates/read_unicode_py3k.html +test/templates/runtimeerr.html +test/templates/runtimeerr_py3k.html +test/templates/unicode.html +test/templates/unicode_arguments.html +test/templates/unicode_arguments_py3k.html +test/templates/unicode_code.html +test/templates/unicode_code_py3k.html +test/templates/unicode_expr.html +test/templates/unicode_expr_py3k.html +test/templates/unicode_runtime_error.html +test/templates/unicode_syntax_error.html +test/templates/foo/modtest.html.py +test/templates/othersubdir/foo.html +test/templates/subdir/incl.html +test/templates/subdir/index.html +test/templates/subdir/modtest.html +test/templates/subdir/foo/modtest.html.py \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/dependency_links.txt b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/entry_points.txt b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/entry_points.txt new file mode 100644 index 0000000..3b15006 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/entry_points.txt @@ -0,0 +1,20 @@ + + [python.templating.engines] + mako = mako.ext.turbogears:TGPlugin + + [pygments.lexers] + mako = mako.ext.pygmentplugin:MakoLexer + html+mako = mako.ext.pygmentplugin:MakoHtmlLexer + xml+mako = mako.ext.pygmentplugin:MakoXmlLexer + js+mako = mako.ext.pygmentplugin:MakoJavascriptLexer + css+mako = mako.ext.pygmentplugin:MakoCssLexer + + [babel.extractors] + mako = mako.ext.babelplugin:extract + + [lingua.extractors] + mako = mako.ext.linguaplugin:LinguaMakoExtractor + + [console_scripts] + mako-render = mako.cmd:cmdline + \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/installed-files.txt b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/installed-files.txt new file mode 100644 index 0000000..029c956 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/installed-files.txt @@ -0,0 +1,60 @@ +../mako/parsetree.py +../mako/codegen.py +../mako/compat.py +../mako/lookup.py 
+../mako/filters.py +../mako/pyparser.py +../mako/pygen.py +../mako/_ast_util.py +../mako/ast.py +../mako/exceptions.py +../mako/lexer.py +../mako/cache.py +../mako/cmd.py +../mako/__init__.py +../mako/util.py +../mako/template.py +../mako/runtime.py +../mako/ext/babelplugin.py +../mako/ext/turbogears.py +../mako/ext/pygmentplugin.py +../mako/ext/beaker_cache.py +../mako/ext/extract.py +../mako/ext/preprocessors.py +../mako/ext/autohandler.py +../mako/ext/__init__.py +../mako/ext/linguaplugin.py +../mako/__pycache__/parsetree.cpython-36.pyc +../mako/__pycache__/codegen.cpython-36.pyc +../mako/__pycache__/compat.cpython-36.pyc +../mako/__pycache__/lookup.cpython-36.pyc +../mako/__pycache__/filters.cpython-36.pyc +../mako/__pycache__/pyparser.cpython-36.pyc +../mako/__pycache__/pygen.cpython-36.pyc +../mako/__pycache__/_ast_util.cpython-36.pyc +../mako/__pycache__/ast.cpython-36.pyc +../mako/__pycache__/exceptions.cpython-36.pyc +../mako/__pycache__/lexer.cpython-36.pyc +../mako/__pycache__/cache.cpython-36.pyc +../mako/__pycache__/cmd.cpython-36.pyc +../mako/__pycache__/__init__.cpython-36.pyc +../mako/__pycache__/util.cpython-36.pyc +../mako/__pycache__/template.cpython-36.pyc +../mako/__pycache__/runtime.cpython-36.pyc +../mako/ext/__pycache__/babelplugin.cpython-36.pyc +../mako/ext/__pycache__/turbogears.cpython-36.pyc +../mako/ext/__pycache__/pygmentplugin.cpython-36.pyc +../mako/ext/__pycache__/beaker_cache.cpython-36.pyc +../mako/ext/__pycache__/extract.cpython-36.pyc +../mako/ext/__pycache__/preprocessors.cpython-36.pyc +../mako/ext/__pycache__/autohandler.cpython-36.pyc +../mako/ext/__pycache__/__init__.cpython-36.pyc +../mako/ext/__pycache__/linguaplugin.cpython-36.pyc +PKG-INFO +requires.txt +not-zip-safe +entry_points.txt +top_level.txt +SOURCES.txt +dependency_links.txt +../../../../bin/mako-render diff --git a/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/not-zip-safe b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/requires.txt b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/requires.txt new file mode 100644 index 0000000..4083f59 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/requires.txt @@ -0,0 +1 @@ +MarkupSafe>=0.9.2 diff --git a/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/top_level.txt b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/top_level.txt new file mode 100644 index 0000000..2951cdd --- /dev/null +++ b/py3/lib/python3.6/site-packages/Mako-1.1.0.egg-info/top_level.txt @@ -0,0 +1 @@ +mako diff --git a/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/LICENSE.txt b/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/LICENSE.txt new file mode 100644 index 0000000..9d227a0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. 
Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/METADATA b/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/METADATA new file mode 100644 index 0000000..b208d93 --- /dev/null +++ b/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/METADATA @@ -0,0 +1,103 @@ +Metadata-Version: 2.1 +Name: MarkupSafe +Version: 1.1.1 +Summary: Safely add untrusted strings to HTML/XML markup. +Home-page: https://palletsprojects.com/p/markupsafe/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: The Pallets Team +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Documentation, https://markupsafe.palletsprojects.com/ +Project-URL: Code, https://github.com/pallets/markupsafe +Project-URL: Issue tracker, https://github.com/pallets/markupsafe/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* + +MarkupSafe +========== + +MarkupSafe implements a text object that escapes characters so it is +safe to use in HTML and XML. Characters that have special meanings are +replaced so that they display as the actual characters. This mitigates +injection attacks, meaning untrusted user input can safely be displayed +on a page. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U MarkupSafe + +.. 
_pip: https://pip.pypa.io/en/stable/quickstart/ + + +Examples +-------- + +.. code-block:: pycon + + >>> from markupsafe import Markup, escape + >>> # escape replaces special characters and wraps in Markup + >>> escape('') + Markup(u'<script>alert(document.cookie);</script>') + >>> # wrap in Markup to mark text "safe" and prevent escaping + >>> Markup('Hello') + Markup('hello') + >>> escape(Markup('Hello')) + Markup('hello') + >>> # Markup is a text subclass (str on Python 3, unicode on Python 2) + >>> # methods and operators escape their arguments + >>> template = Markup("Hello %s") + >>> template % '"World"' + Markup('Hello "World"') + + +Donate +------ + +The Pallets organization develops and supports MarkupSafe and other +libraries that use it. In order to grow the community of contributors +and users, and allow the maintainers to devote more time to the +projects, `please donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +* Website: https://palletsprojects.com/p/markupsafe/ +* Documentation: https://markupsafe.palletsprojects.com/ +* License: `BSD-3-Clause `_ +* Releases: https://pypi.org/project/MarkupSafe/ +* Code: https://github.com/pallets/markupsafe +* Issue tracker: https://github.com/pallets/markupsafe/issues +* Test status: + + * Linux, Mac: https://travis-ci.org/pallets/markupsafe + * Windows: https://ci.appveyor.com/project/pallets/markupsafe + +* Test coverage: https://codecov.io/gh/pallets/markupsafe + + diff --git a/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/RECORD b/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/RECORD new file mode 100644 index 0000000..57ac82c --- /dev/null +++ b/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/RECORD @@ -0,0 +1,16 @@ +MarkupSafe-1.1.1.dist-info/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +MarkupSafe-1.1.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 +MarkupSafe-1.1.1.dist-info/RECORD,, +MarkupSafe-1.1.1.dist-info/METADATA,sha256=nJHwJ4_4ka-V39QH883jPrslj6inNdyyNASBXbYgHXQ,3570 +MarkupSafe-1.1.1.dist-info/WHEEL,sha256=d2ILPScH-y2UwGxsW1PeA2TT-KW0Git4AJ6LeOK8sQo,109 +markupsafe/_constants.py,sha256=zo2ajfScG-l1Sb_52EP3MlDCqO7Y1BVHUXXKRsVDRNk,4690 +markupsafe/_compat.py,sha256=uEW1ybxEjfxIiuTbRRaJpHsPFf4yQUMMKaPgYEC5XbU,558 +markupsafe/_native.py,sha256=d-8S_zzYt2y512xYcuSxq0NeG2DUUvG80wVdTn-4KI8,1873 +markupsafe/__init__.py,sha256=oTblO5f9KFM-pvnq9bB0HgElnqkJyqHnFN1Nx2NIvnY,10126 +markupsafe/_speedups.cpython-36m-x86_64-linux-gnu.so,sha256=YAxqjdtS2XJQ043wfxMAnE1u7KpsBc49UrW9yGPiK0w,38875 +markupsafe/_speedups.c,sha256=k0fzEIK3CP6MmMqeY0ob43TP90mVN0DTyn7BAl3RqSg,9884 +MarkupSafe-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +markupsafe/__pycache__/_native.cpython-36.pyc,, +markupsafe/__pycache__/_compat.cpython-36.pyc,, +markupsafe/__pycache__/_constants.cpython-36.pyc,, +markupsafe/__pycache__/__init__.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/WHEEL b/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/WHEEL new file mode 100644 index 0000000..92946fe --- /dev/null +++ b/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: false +Tag: cp36-cp36m-manylinux1_x86_64 + diff --git a/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/top_level.txt 
b/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/top_level.txt new file mode 100644 index 0000000..75bf729 --- /dev/null +++ b/py3/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git a/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/LICENSE b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/LICENSE new file mode 100644 index 0000000..6417781 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/LICENSE @@ -0,0 +1,22 @@ +Copyright P G Jones 2017. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/METADATA b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/METADATA new file mode 100644 index 0000000..19f11a3 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/METADATA @@ -0,0 +1,178 @@ +Metadata-Version: 2.1 +Name: Quart +Version: 0.6.13 +Summary: A Python ASGI web microframework with the same API as Flask +Home-page: https://gitlab.com/pgjones/quart/ +Author: P G Jones +Author-email: philip.graham.jones@googlemail.com +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.6.1 +Requires-Dist: aiofiles +Requires-Dist: blinker +Requires-Dist: click +Requires-Dist: hypercorn (<0.6,>=0.4) +Requires-Dist: itsdangerous +Requires-Dist: jinja2 +Requires-Dist: multidict +Requires-Dist: sortedcontainers + +Quart +===== + +.. image:: https://assets.gitlab-static.net/pgjones/quart/raw/master/artwork/logo.png + :alt: Quart logo + +|Build Status| |docs| |pypi| |http| |python| |license| |chat| + +Quart is a Python `ASGI +`_ web +microframework. 
It is intended to provide the easiest way to use +asyncio functionality in a web context, especially with existing Flask +apps. This is possible as the Quart API is a superset of the `Flask +`_ API. + +Quart aims to be a complete web microframework, as it supports +HTTP/1.1, HTTP/2 and websockets. Quart is very extendable and has a +number of known `extensions +`_ and works +with many of the `Flask extensions +`_. + +Quickstart +---------- + +Quart can be installed via `pipenv +`_ or +`pip `_, + +.. code-block:: console + + $ pipenv install quart + $ pip install quart + +and requires Python 3.6.1 or higher. + +A minimal Quart example is, + +.. code-block:: python + + from quart import Quart, websocket + + app = Quart(__name__) + + @app.route('/') + async def hello(): + return 'hello' + + @app.websocket('/ws') + async def ws(): + while True: + await websocket.send('hello') + + app.run() + +if the above is in a file called ``app.py`` it can be run as, + +.. code-block:: console + + $ python app.py + +To deploy in a production setting see the `deployment +`_ documentation. + +Features +-------- + +Quart supports the full ASGI 2.0 specification as well as the +websocket response and HTTP/2 server push extensions. For those of you +familar with Flask, Quart extends the Flask-API by adding support for, + +- HTTP/1.1 request streaming. +- Websockets. +- HTTP/2 server push. + +Note that not all ASGI servers support these features, for this reason +the recommended server is `Hypercorn +`_. + +Contributing +------------ + +Quart is developed on `GitLab +`_. You are very welcome to open +`issues `_ or propose `merge +requests `_. + +Testing +~~~~~~~ + +The best way to test Quart is with Tox, + +.. code-block:: console + + $ pipenv install tox + $ tox + +this will check the code style and run the tests. + +Help +---- + +The Quart `documentation `_ is the +best place to start, after that try opening an `issue +`_. + +API Compatibility with Flask +---------------------------- + +The Flask API can be described as consisting of the Flask public and +private APIs and Werkzeug upon which Flask is based. Quart is designed +to be fully compatible with the Flask public API (aside from async and +await keywords). Thereafter the aim is to be mostly compatible with +the Flask private API and to provide no guarantees about the Werkzeug +API. + +Migrating from Flask +~~~~~~~~~~~~~~~~~~~~ + +It should be possible to migrate to Quart from Flask by a find and +replace of ``flask`` to ``quart`` and then adding ``async`` and +``await`` keywords. See the `docs +`_ for full +details. + + +.. |Build Status| image:: https://gitlab.com/pgjones/quart/badges/master/build.svg + :target: https://gitlab.com/pgjones/quart/commits/master + +.. |docs| image:: https://img.shields.io/badge/docs-passing-brightgreen.svg + :target: https://pgjones.gitlab.io/quart/ + +.. |pypi| image:: https://img.shields.io/pypi/v/quart.svg + :target: https://pypi.python.org/pypi/Quart/ + +.. |http| image:: https://img.shields.io/badge/http-1.0,1.1,2-orange.svg + :target: https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol + +.. |python| image:: https://img.shields.io/pypi/pyversions/quart.svg + :target: https://pypi.python.org/pypi/Quart/ + +.. |license| image:: https://img.shields.io/badge/license-MIT-blue.svg + :target: https://gitlab.com/pgjones/quart/blob/master/LICENSE + +.. 
|chat| image:: https://img.shields.io/badge/zulip-join_chat-brightgreen.svg + :target: https://quart.zulipchat.com + + diff --git a/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/RECORD b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/RECORD new file mode 100644 index 0000000..fc96d53 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/RECORD @@ -0,0 +1,82 @@ +quart/__about__.py,sha256=vwo0iZODdxvmXKhXpjohNARfis5ATfZ9BqDl5dnEIbI,23 +quart/__init__.py,sha256=7ym0ygllhtvjlHdd8XEoKKT7_HPJsACH7o8vl3heNV4,2278 +quart/__main__.py,sha256=2wjJPPid_ntwG6Esmk2lxRLj3ABV1Nsdj3u1jB8jHlo,78 +quart/app.py,sha256=Pl_o9D5_45nrmtA58h83kTnm6UcPwWsDnLLLC1PqFZ4,64309 +quart/asgi.py,sha256=J6a4d1ZHp9_LPHOhINpqq25iOc6SEYypYi5qZvs6t8A,7012 +quart/blueprints.py,sha256=zV8ZLJe4AN7SHoxVlDFHNuiyJIKMN1beKLJeSSrkBHc,26237 +quart/cli.py,sha256=Ssgzn3cSNnlZ3czfVBGTByZfkOhbYTLKFs-ILL9xuck,6294 +quart/config.py,sha256=-p9hDARvUO6dRpbxm-4fHautBPsO733FaBRHswICtPo,8923 +quart/ctx.py,sha256=NJdC97ZBlEEoK3QeqPG9Wqxgg6FXXCBoFaPjMwOpvN0,12881 +quart/datastructures.py,sha256=mdpxzUlbUC9fCU5Z-WD70TquDYo_-UrUByresdj-DbY,20995 +quart/debug.py,sha256=HPp4USigoyIygCaZQWJs5Yi6-uAjaTmXPJKX7fweB2Y,2011 +quart/exceptions.py,sha256=pCDaQKdif50vh9F_uGAgkO9ZKkDnurDaKYsjkaKbNPA,3879 +quart/globals.py,sha256=MVYya_q9vO9fOb-7zPQFNlzQuYZJLzQSCK1lEtMezUE,1019 +quart/helpers.py,sha256=9lHCgo25FZr117DUGQUKfJ9U1WNgnGWreaB4w6aUkoo,6531 +quart/local.py,sha256=Z0H3GggPiYxx9GFdoF2p52qAjS_fYn5NmH1Wow53vn4,7714 +quart/logging.py,sha256=NqOL38JXIlkMDLZmLQ3hki3y3UPnDmJRizuNvFDzkt4,2738 +quart/routing.py,sha256=sM7kbQtayFctqBvuTC2n7XccBY_bQpocbaASjxXcRlM,17775 +quart/sessions.py,sha256=d5OS8q2C3rxI0Tu0_P9Z65sdjU4nOs_XXYnpg_yLdo4,9340 +quart/signals.py,sha256=yy-I-2EkGmwlR4vgEbHtRwLTAGYg4exG7g3oUHopOko,4255 +quart/static.py,sha256=wqw66Vz4JPN1vXI9oNIcZRlzx-xRtJQP1GvkLmj4Rp8,6043 +quart/templating.py,sha256=w1PO03ML2aUGf0g7ouoxmSme0IH_WTKFq-Oms9GfhwU,3730 +quart/testing.py,sha256=_K3q9fOqWZRkiGH57ZEdHqeqfZr4boloB9qwPXxCF-8,10156 +quart/typing.py,sha256=YFyXwUCLfwGaz1trRBMUWUaoVcgbrFKfUr_nvM9Zv7k,470 +quart/utils.py,sha256=hoa_7-V7XNDagYIPww-TCMcSqpsVIAJMsRzgmWSzwOM,2196 +quart/views.py,sha256=yxFzsQOOe0Qqwex-edvHIq82KL7DyPSPplxY_ZLJEQo,3607 +quart/worker.py,sha256=uxTRGXfmcrXLSMrezEd5loO4N3kPPr8YaJHwzFjEykE,3629 +quart/flask_patch/__init__.py,sha256=v_nYISvUh6toh9xpStrQWUS1W59weLkHeDSoUUEyrO8,2419 +quart/flask_patch/_patch.py,sha256=OS_rOEC-vxiJptjeGQ-bvWr2ZjovyIjQdD3JOsFBfZk,3419 +quart/flask_patch/_synchronise.py,sha256=N92QngWDVmcjUzsLu9gpZRvmD6CkhDbdtPCDj6VdeCA,745 +quart/flask_patch/app.py,sha256=NiOxxNEWXUDJj3sZ_JoI1TFkQ_pjKC-tkuFt2yJ_OnI,1549 +quart/flask_patch/globals.py,sha256=nZg3s011InLpOieWsKHvnd6188W0Y0iD5w1TDCjPFfc,1094 +quart/json/__init__.py,sha256=XN9jPHKZHi-XTf9rSyxOvf6adHlyhjI8LGFwrb4BcFM,3198 +quart/json/tag.py,sha256=pl8NCClWogDVf11ml7mWFCFn_5onIz6xS-WPLG0LN58,4848 +quart/wrappers/__init__.py,sha256=_HbJz3YMG0hgCHSlQTH9Vh_R56CxOSDnWTD6H6ElM_8,222 +quart/wrappers/_base.py,sha256=KFGZ6yJjEeNpsLx58w501F3VYkpak57DhNUxCCImtKE,11974 +quart/wrappers/request.py,sha256=u9D2PmL0KmIOtqqklU2IzCqdrHJzLEcLF3yzBzDdfGg,10574 +quart/wrappers/response.py,sha256=EaNvUbg5IDBOXk3jogUq1V_tHDxEeElWY0VguTPpnck,15267 +Quart-0.6.13.dist-info/LICENSE,sha256=QtV1hUGogaFLFjgCXcQFOnFPxTkLUcwY43VJ6DXJKSg,1050 +Quart-0.6.13.dist-info/METADATA,sha256=wPTeB5MuDPmXaXH1WZJn9qqnFHw3qp9hgkXPsUJ_FFc,5391 +Quart-0.6.13.dist-info/WHEEL,sha256=_NOXIqFgOaYmlm9RJLPQZ13BJuEIrp5jx5ptRD5uh3Y,92 
+Quart-0.6.13.dist-info/entry_points.txt,sha256=RqA0UUUkmZOtSvbyqsevnfBFUHUEcDpUCikcTO9czIw,42 +Quart-0.6.13.dist-info/top_level.txt,sha256=F_INWWD_HZA6avzVV_hQKfjEnZSlUUe6Y0XDjg3zABo,6 +Quart-0.6.13.dist-info/RECORD,, +../../../bin/quart,sha256=tihlfiCIPT8NcKoyH0E4h3_tFi4Yew98IKUeLVmBdtc,256 +Quart-0.6.13.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +quart/wrappers/__pycache__/response.cpython-36.pyc,, +quart/wrappers/__pycache__/request.cpython-36.pyc,, +quart/wrappers/__pycache__/__init__.cpython-36.pyc,, +quart/wrappers/__pycache__/_base.cpython-36.pyc,, +quart/__pycache__/testing.cpython-36.pyc,, +quart/__pycache__/asgi.cpython-36.pyc,, +quart/__pycache__/helpers.cpython-36.pyc,, +quart/__pycache__/worker.cpython-36.pyc,, +quart/__pycache__/local.cpython-36.pyc,, +quart/__pycache__/blueprints.cpython-36.pyc,, +quart/__pycache__/routing.cpython-36.pyc,, +quart/__pycache__/config.cpython-36.pyc,, +quart/__pycache__/globals.cpython-36.pyc,, +quart/__pycache__/views.cpython-36.pyc,, +quart/__pycache__/__about__.cpython-36.pyc,, +quart/__pycache__/__main__.cpython-36.pyc,, +quart/__pycache__/ctx.cpython-36.pyc,, +quart/__pycache__/typing.cpython-36.pyc,, +quart/__pycache__/app.cpython-36.pyc,, +quart/__pycache__/sessions.cpython-36.pyc,, +quart/__pycache__/__init__.cpython-36.pyc,, +quart/__pycache__/logging.cpython-36.pyc,, +quart/__pycache__/templating.cpython-36.pyc,, +quart/__pycache__/utils.cpython-36.pyc,, +quart/__pycache__/datastructures.cpython-36.pyc,, +quart/__pycache__/signals.cpython-36.pyc,, +quart/__pycache__/debug.cpython-36.pyc,, +quart/__pycache__/static.cpython-36.pyc,, +quart/__pycache__/cli.cpython-36.pyc,, +quart/__pycache__/exceptions.cpython-36.pyc,, +quart/flask_patch/__pycache__/_synchronise.cpython-36.pyc,, +quart/flask_patch/__pycache__/_patch.cpython-36.pyc,, +quart/flask_patch/__pycache__/globals.cpython-36.pyc,, +quart/flask_patch/__pycache__/app.cpython-36.pyc,, +quart/flask_patch/__pycache__/__init__.cpython-36.pyc,, +quart/json/__pycache__/tag.cpython-36.pyc,, +quart/json/__pycache__/__init__.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/WHEEL b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/WHEEL new file mode 100644 index 0000000..4eeaea1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.3) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/entry_points.txt b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/entry_points.txt new file mode 100644 index 0000000..38fa811 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +quart = quart.cli:main + diff --git a/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/top_level.txt new file mode 100644 index 0000000..60af440 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart-0.6.13.dist-info/top_level.txt @@ -0,0 +1 @@ +quart diff --git a/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/LICENSE 
b/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/LICENSE new file mode 100644 index 0000000..8a5dad9 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/LICENSE @@ -0,0 +1,22 @@ +Copyright P G Jones 2018. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/METADATA b/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/METADATA new file mode 100644 index 0000000..4977c28 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/METADATA @@ -0,0 +1,159 @@ +Metadata-Version: 2.1 +Name: Quart-CORS +Version: 0.1.3 +Summary: A Quart extension to provide Cross Origin Resource Sharing, access control, support. +Home-page: https://gitlab.com/pgjones/quart-cors/ +Author: P G Jones +Author-email: philip.graham.jones@googlemail.com +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.6.1 +Requires-Dist: Quart (>=0.6.11) + +Quart-CORS +========== + +|Build Status| |pypi| |python| |license| + +Quart-CORS is an extension for `Quart +`_ to handle `Cross Origin Resource +Sharing `_, CORS (also known as access +control). + +CORS is required to share resources in browsers due to the `Same +Origin Policy `_ +which prevents resources being read and limits write actions from a +different origin. An origin in this case is defined as the scheme, +host and port combination. + +In practice the Same Origin Policy means that a browser visiting +``http://quart.com`` will prevent the response of ``GET +http://api.com`` being read. It will also prevent requests such as +``POST http://api.com``. Note that embedding is allowed e.g. ````. + +CORS allows the server to indicate to the browser that certain +resources can be used. It does so for GET requests by returning +headers that indicate the origins that can use the resource whereas +for other requests the browser will send a preflight OPTIONS request +and then inspect the headers in the response. 
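As a concrete illustration of the access-control exchange described above (a sketch only, not Quart-CORS API; the origin and header values are invented):

.. code-block:: python

    # The browser first sends a preflight OPTIONS request for the cross-origin POST:
    preflight_request_headers = {
        "Origin": "http://quart.com",
        "Access-Control-Request-Method": "POST",
        "Access-Control-Request-Headers": "Content-Type",
    }

    # The server's response tells the browser what is permitted:
    preflight_response_headers = {
        "Access-Control-Allow-Origin": "http://quart.com",
        "Access-Control-Allow-Methods": "POST",
        "Access-Control-Allow-Headers": "Content-Type",
    }

    # Only if the allowed origin, method and headers cover the intended request
    # does the browser go on to send the real POST.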
+ +Simple (GET) requests should return CORS headers specifying the +allowed origins (which can be any origin, ``*``) and whether +credentials should be shared. If credentials can be shared the origins +must be specific and not a wildcard. + +Preflight requests should return CORS headers specifying the allowed +origins, methods and headers in the request, whether credentials +should be shared and what response headers should be exposed. + +Usage +----- + +To add CORS access control headers to all of the routes in the +application, simply apply the ``cors`` function to the application, + +.. code-block:: python + + app = Quart(__name__) + app = cors(app) + +alternatively if you wish to add CORS selectively either apply the +``cors`` function to a blueprint or the ``route_cors`` function to +a route, + +.. code-block:: python + + blueprint = Blueprint(__name__) + blueprint = cors(blueprint) + + @blueprint.route('/') + @route_cors() + async def handler(): + ... + +In addition defaults can be specified in the application +configuration, + +============================ ======== +Configuration key type +---------------------------- -------- +QUART_CORS_ALLOW_CREDENTIALS bool +QUART_CORS_ALLOW_HEADERS Set[str] +QUART_CORS_ALLOW_METHODS Set[str] +QUART_CORS_ALLOW_ORIGIN Set[str] +QUART_CORS_EXPOSE_HEADERS Set[str] +QUART_CORS_MAX_AGE float +============================ ======== + +Both the ``cors`` and ``route_cors`` functions take these arguments, + +================= =========================== +Argument type +----------------- --------------------------- +allow_credentials bool +allow_headers Union[Set[str], str] +allow_methods Union[Set[str], str] +allow_origin Union[Set[str], str] +expose_headers Union[Set[str], str] +max_age Union[int, float, timedelta] +================= =========================== + +.. note:: + + The Same Origin Policy does not apply to websockets, and hence this + extension does not add CORS headers to websocket routes. In + addition the ``route_cors`` function should not be used on a + websocket route. + +Contributing +------------ + +Quart-CORS is developed on `GitLab +`_. You are very welcome to +open `issues `_ or +propose `merge requests +`_. + +Testing +~~~~~~~ + +The best way to test Quart-CORS is with Tox, + +.. code-block:: console + + $ pipenv install tox + $ tox + +this will check the code style and run the tests. + +Help +---- + +This README is the best place to start, after that try opening an +`issue `_. + + +.. |Build Status| image:: https://gitlab.com/pgjones/quart-cors/badges/master/build.svg + :target: https://gitlab.com/pgjones/quart-cors/commits/master + +.. |pypi| image:: https://img.shields.io/pypi/v/quart-cors.svg + :target: https://pypi.python.org/pypi/Quart-CORS/ + +.. |python| image:: https://img.shields.io/pypi/pyversions/quart-cors.svg + :target: https://pypi.python.org/pypi/Quart-CORS/ + +..
|license| image:: https://img.shields.io/badge/license-MIT-blue.svg + :target: https://gitlab.com/pgjones/quart-cors/blob/master/LICENSE + + diff --git a/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/RECORD b/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/RECORD new file mode 100644 index 0000000..9e06238 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/RECORD @@ -0,0 +1,8 @@ +quart_cors.py,sha256=BHNBom_DXEWfI7QBUAqY1iEequOlrTL8bKdIHJCK7aI,10588 +Quart_CORS-0.1.3.dist-info/LICENSE,sha256=HjGdhzPYNEZ3zQ7mwqjMBgHHRId5HViyayt-8GMMK38,1050 +Quart_CORS-0.1.3.dist-info/METADATA,sha256=hib8lN1d5pvMwH-jqd8vsDneGNDFCTg9-QKHNj6w23I,5231 +Quart_CORS-0.1.3.dist-info/WHEEL,sha256=_NOXIqFgOaYmlm9RJLPQZ13BJuEIrp5jx5ptRD5uh3Y,92 +Quart_CORS-0.1.3.dist-info/top_level.txt,sha256=8rvyefjLRk14p42yU4avg7pG-4TIDyYwin-uh5KVW_4,11 +Quart_CORS-0.1.3.dist-info/RECORD,, +Quart_CORS-0.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +__pycache__/quart_cors.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/WHEEL b/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/WHEEL new file mode 100644 index 0000000..4eeaea1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.3) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/top_level.txt new file mode 100644 index 0000000..98a048a --- /dev/null +++ b/py3/lib/python3.6/site-packages/Quart_CORS-0.1.3.dist-info/top_level.txt @@ -0,0 +1 @@ +quart_cors diff --git a/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/PKG-INFO b/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/PKG-INFO new file mode 100644 index 0000000..b31915a --- /dev/null +++ b/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/PKG-INFO @@ -0,0 +1,181 @@ +Metadata-Version: 2.1 +Name: SQLAlchemy +Version: 1.3.7 +Summary: Database Abstraction Library +Home-page: http://www.sqlalchemy.org +Author: Mike Bayer +Author-email: mike_mp@zzzcomputing.com +License: MIT +Project-URL: Documentation, https://docs.sqlalchemy.org +Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/ +Description: SQLAlchemy + ========== + + The Python SQL Toolkit and Object Relational Mapper + + Introduction + ------------- + + SQLAlchemy is the Python SQL toolkit and Object Relational Mapper + that gives application developers the full power and + flexibility of SQL. SQLAlchemy provides a full suite + of well known enterprise-level persistence patterns, + designed for efficient and high-performing database + access, adapted into a simple and Pythonic domain + language. + + Major SQLAlchemy features include: + + * An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. + * A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. 
+ Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. + * A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. + * A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. + * All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. + * Database introspection and generation. Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. + + SQLAlchemy's philosophy: + + * SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. + * An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. + * The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. + * With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer- + initiated decision. + * Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. + * Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. + * Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. 
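As a small, hypothetical illustration of the last two points above (bound parameters, and the Core expression language standing on its own), using the SQLAlchemy 1.3 API vendored by this diff; the table and values are made up:

.. code-block:: python

    # Core-only sketch: build SQL with the expression language, observe that it
    # renders a bound parameter rather than the literal value, and persist
    # nothing until the transaction commits.
    from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, select

    metadata = MetaData()
    tokens = Table(
        'tokens', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(64)),
    )

    engine = create_engine('sqlite:///:memory:')
    metadata.create_all(engine)

    query = select([tokens.c.name]).where(tokens.c.name == 'FLO')
    print(query)  # SELECT tokens.name FROM tokens WHERE tokens.name = :name_1

    with engine.begin() as conn:   # a transaction: committed on success, rolled back on error
        conn.execute(tokens.insert().values(name='FLO'))
        print(conn.execute(query).fetchall())  # [('FLO',)]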
+ + Documentation + ------------- + + Latest documentation is at: + + http://www.sqlalchemy.org/docs/ + + Installation / Requirements + --------------------------- + + Full documentation for installation is at + `Installation `_. + + Getting Help / Development / Bug reporting + ------------------------------------------ + + Please refer to the `SQLAlchemy Community Guide `_. + + Code of Conduct + --------------- + + Above all, SQLAlchemy places great emphasis on polite, thoughtful, and + constructive communication between users and developers. + Please see our current Code of Conduct at + `Code of Conduct `_. + + License + ------- + + SQLAlchemy is distributed under the `MIT license + `_. + + +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Classifier: Operating System :: OS Independent +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Provides-Extra: mssql +Provides-Extra: mssql_pymssql +Provides-Extra: mssql_pyodbc +Provides-Extra: mysql +Provides-Extra: oracle +Provides-Extra: postgresql +Provides-Extra: postgresql_pg8000 +Provides-Extra: postgresql_psycopg2binary +Provides-Extra: postgresql_psycopg2cffi +Provides-Extra: pymysql diff --git a/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/SOURCES.txt b/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/SOURCES.txt new file mode 100644 index 0000000..9defaa1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/SOURCES.txt @@ -0,0 +1,844 @@ +AUTHORS +CHANGES +LICENSE +MANIFEST.in +README.dialects.rst +README.rst +README.unittests.rst +setup.cfg +setup.py +tox.ini +doc/contents.html +doc/copyright.html +doc/errors.html +doc/genindex.html +doc/glossary.html +doc/index.html +doc/intro.html +doc/notfound.html +doc/search.html +doc/searchindex.js +doc/_images/sqla_arch_small.png +doc/_images/sqla_engine_arch.png +doc/_modules/index.html +doc/_modules/examples/adjacency_list/adjacency_list.html +doc/_modules/examples/association/basic_association.html +doc/_modules/examples/association/dict_of_sets_with_default.html +doc/_modules/examples/association/proxied_association.html +doc/_modules/examples/custom_attributes/active_column_defaults.html +doc/_modules/examples/custom_attributes/custom_management.html +doc/_modules/examples/custom_attributes/listen_for_events.html +doc/_modules/examples/dogpile_caching/advanced.html +doc/_modules/examples/dogpile_caching/caching_query.html +doc/_modules/examples/dogpile_caching/environment.html +doc/_modules/examples/dogpile_caching/fixture_data.html +doc/_modules/examples/dogpile_caching/helloworld.html +doc/_modules/examples/dogpile_caching/local_session_caching.html +doc/_modules/examples/dogpile_caching/model.html +doc/_modules/examples/dogpile_caching/relationship_caching.html +doc/_modules/examples/dynamic_dict/dynamic_dict.html 
+doc/_modules/examples/elementtree/adjacency_list.html +doc/_modules/examples/elementtree/optimized_al.html +doc/_modules/examples/elementtree/pickle_type.html +doc/_modules/examples/generic_associations/discriminator_on_association.html +doc/_modules/examples/generic_associations/generic_fk.html +doc/_modules/examples/generic_associations/table_per_association.html +doc/_modules/examples/generic_associations/table_per_related.html +doc/_modules/examples/graphs/directed_graph.html +doc/_modules/examples/inheritance/concrete.html +doc/_modules/examples/inheritance/joined.html +doc/_modules/examples/inheritance/single.html +doc/_modules/examples/join_conditions/cast.html +doc/_modules/examples/join_conditions/threeway.html +doc/_modules/examples/large_collection/large_collection.html +doc/_modules/examples/materialized_paths/materialized_paths.html +doc/_modules/examples/nested_sets/nested_sets.html +doc/_modules/examples/performance/__main__.html +doc/_modules/examples/performance/bulk_inserts.html +doc/_modules/examples/performance/bulk_updates.html +doc/_modules/examples/performance/large_resultsets.html +doc/_modules/examples/performance/short_selects.html +doc/_modules/examples/performance/single_inserts.html +doc/_modules/examples/postgis/postgis.html +doc/_modules/examples/sharding/attribute_shard.html +doc/_modules/examples/space_invaders/space_invaders.html +doc/_modules/examples/versioned_history/history_meta.html +doc/_modules/examples/versioned_history/test_versioning.html +doc/_modules/examples/versioned_rows/versioned_map.html +doc/_modules/examples/versioned_rows/versioned_rows.html +doc/_modules/examples/versioned_rows/versioned_rows_w_versionid.html +doc/_modules/examples/versioned_rows/versioned_update_old_row.html +doc/_modules/examples/vertical/dictlike-polymorphic.html +doc/_modules/examples/vertical/dictlike.html +doc/_static/basic.css +doc/_static/changelog.css +doc/_static/detectmobile.js +doc/_static/docs.css +doc/_static/doctools.js +doc/_static/documentation_options.js +doc/_static/dragons.png +doc/_static/file.png +doc/_static/init.js +doc/_static/jquery-3.2.1.js +doc/_static/jquery.js +doc/_static/language_data.js +doc/_static/minus.png +doc/_static/plus.png +doc/_static/pygments.css +doc/_static/searchtools.js +doc/_static/sphinx_paramlinks.css +doc/_static/underscore-1.3.1.js +doc/_static/underscore.js +doc/build/Makefile +doc/build/conf.py +doc/build/contents.rst +doc/build/copyright.rst +doc/build/corrections.py +doc/build/errors.rst +doc/build/glossary.rst +doc/build/index.rst +doc/build/intro.rst +doc/build/requirements.txt +doc/build/sqla_arch_small.png +doc/build/changelog/changelog_01.rst +doc/build/changelog/changelog_02.rst +doc/build/changelog/changelog_03.rst +doc/build/changelog/changelog_04.rst +doc/build/changelog/changelog_05.rst +doc/build/changelog/changelog_06.rst +doc/build/changelog/changelog_07.rst +doc/build/changelog/changelog_08.rst +doc/build/changelog/changelog_09.rst +doc/build/changelog/changelog_10.rst +doc/build/changelog/changelog_11.rst +doc/build/changelog/changelog_12.rst +doc/build/changelog/changelog_13.rst +doc/build/changelog/index.rst +doc/build/changelog/migration_04.rst +doc/build/changelog/migration_05.rst +doc/build/changelog/migration_06.rst +doc/build/changelog/migration_07.rst +doc/build/changelog/migration_08.rst +doc/build/changelog/migration_09.rst +doc/build/changelog/migration_10.rst +doc/build/changelog/migration_11.rst +doc/build/changelog/migration_12.rst +doc/build/changelog/migration_13.rst 
+doc/build/changelog/unreleased_10/4065.rst +doc/build/changelog/unreleased_10/README.txt +doc/build/changelog/unreleased_11/README.txt +doc/build/changelog/unreleased_12/README.txt +doc/build/changelog/unreleased_13/README.txt +doc/build/core/api_basics.rst +doc/build/core/compiler.rst +doc/build/core/connections.rst +doc/build/core/constraints.rst +doc/build/core/custom_types.rst +doc/build/core/ddl.rst +doc/build/core/defaults.rst +doc/build/core/dml.rst +doc/build/core/engines.rst +doc/build/core/engines_connections.rst +doc/build/core/event.rst +doc/build/core/events.rst +doc/build/core/exceptions.rst +doc/build/core/expression_api.rst +doc/build/core/functions.rst +doc/build/core/index.rst +doc/build/core/inspection.rst +doc/build/core/interfaces.rst +doc/build/core/internals.rst +doc/build/core/metadata.rst +doc/build/core/pooling.rst +doc/build/core/reflection.rst +doc/build/core/schema.rst +doc/build/core/selectable.rst +doc/build/core/serializer.rst +doc/build/core/sqla_engine_arch.png +doc/build/core/sqlelement.rst +doc/build/core/tutorial.rst +doc/build/core/type_api.rst +doc/build/core/type_basics.rst +doc/build/core/types.rst +doc/build/dialects/firebird.rst +doc/build/dialects/index.rst +doc/build/dialects/mssql.rst +doc/build/dialects/mysql.rst +doc/build/dialects/oracle.rst +doc/build/dialects/postgresql.rst +doc/build/dialects/sqlite.rst +doc/build/dialects/sybase.rst +doc/build/faq/connections.rst +doc/build/faq/index.rst +doc/build/faq/metadata_schema.rst +doc/build/faq/ormconfiguration.rst +doc/build/faq/performance.rst +doc/build/faq/sessions.rst +doc/build/faq/sqlexpressions.rst +doc/build/orm/backref.rst +doc/build/orm/basic_relationships.rst +doc/build/orm/cascades.rst +doc/build/orm/classical.rst +doc/build/orm/collections.rst +doc/build/orm/composites.rst +doc/build/orm/constructors.rst +doc/build/orm/contextual.rst +doc/build/orm/deprecated.rst +doc/build/orm/events.rst +doc/build/orm/examples.rst +doc/build/orm/exceptions.rst +doc/build/orm/extending.rst +doc/build/orm/index.rst +doc/build/orm/inheritance.rst +doc/build/orm/inheritance_loading.rst +doc/build/orm/internals.rst +doc/build/orm/join_conditions.rst +doc/build/orm/loading.rst +doc/build/orm/loading_columns.rst +doc/build/orm/loading_objects.rst +doc/build/orm/loading_relationships.rst +doc/build/orm/mapped_attributes.rst +doc/build/orm/mapped_sql_expr.rst +doc/build/orm/mapper_config.rst +doc/build/orm/mapping_api.rst +doc/build/orm/mapping_columns.rst +doc/build/orm/mapping_styles.rst +doc/build/orm/nonstandard_mappings.rst +doc/build/orm/persistence_techniques.rst +doc/build/orm/query.rst +doc/build/orm/relationship_api.rst +doc/build/orm/relationship_persistence.rst +doc/build/orm/relationships.rst +doc/build/orm/scalar_mapping.rst +doc/build/orm/self_referential.rst +doc/build/orm/session.rst +doc/build/orm/session_api.rst +doc/build/orm/session_basics.rst +doc/build/orm/session_events.rst +doc/build/orm/session_state_management.rst +doc/build/orm/session_transaction.rst +doc/build/orm/tutorial.rst +doc/build/orm/versioning.rst +doc/build/orm/extensions/associationproxy.rst +doc/build/orm/extensions/automap.rst +doc/build/orm/extensions/baked.rst +doc/build/orm/extensions/horizontal_shard.rst +doc/build/orm/extensions/hybrid.rst +doc/build/orm/extensions/index.rst +doc/build/orm/extensions/indexable.rst +doc/build/orm/extensions/instrumentation.rst +doc/build/orm/extensions/mutable.rst +doc/build/orm/extensions/orderinglist.rst +doc/build/orm/extensions/declarative/api.rst 
+doc/build/orm/extensions/declarative/basic_use.rst +doc/build/orm/extensions/declarative/index.rst +doc/build/orm/extensions/declarative/inheritance.rst +doc/build/orm/extensions/declarative/mixins.rst +doc/build/orm/extensions/declarative/relationships.rst +doc/build/orm/extensions/declarative/table_config.rst +doc/build/texinputs/Makefile +doc/build/texinputs/sphinx.sty +doc/changelog/changelog_01.html +doc/changelog/changelog_02.html +doc/changelog/changelog_03.html +doc/changelog/changelog_04.html +doc/changelog/changelog_05.html +doc/changelog/changelog_06.html +doc/changelog/changelog_07.html +doc/changelog/changelog_08.html +doc/changelog/changelog_09.html +doc/changelog/changelog_10.html +doc/changelog/changelog_11.html +doc/changelog/changelog_12.html +doc/changelog/changelog_13.html +doc/changelog/index.html +doc/changelog/migration_04.html +doc/changelog/migration_05.html +doc/changelog/migration_06.html +doc/changelog/migration_07.html +doc/changelog/migration_08.html +doc/changelog/migration_09.html +doc/changelog/migration_10.html +doc/changelog/migration_11.html +doc/changelog/migration_12.html +doc/changelog/migration_13.html +doc/core/api_basics.html +doc/core/compiler.html +doc/core/connections.html +doc/core/constraints.html +doc/core/custom_types.html +doc/core/ddl.html +doc/core/defaults.html +doc/core/dml.html +doc/core/engines.html +doc/core/engines_connections.html +doc/core/event.html +doc/core/events.html +doc/core/exceptions.html +doc/core/expression_api.html +doc/core/functions.html +doc/core/index.html +doc/core/inspection.html +doc/core/interfaces.html +doc/core/internals.html +doc/core/metadata.html +doc/core/pooling.html +doc/core/reflection.html +doc/core/schema.html +doc/core/selectable.html +doc/core/serializer.html +doc/core/sqlelement.html +doc/core/tutorial.html +doc/core/type_api.html +doc/core/type_basics.html +doc/core/types.html +doc/dialects/firebird.html +doc/dialects/index.html +doc/dialects/mssql.html +doc/dialects/mysql.html +doc/dialects/oracle.html +doc/dialects/postgresql.html +doc/dialects/sqlite.html +doc/dialects/sybase.html +doc/faq/connections.html +doc/faq/index.html +doc/faq/metadata_schema.html +doc/faq/ormconfiguration.html +doc/faq/performance.html +doc/faq/sessions.html +doc/faq/sqlexpressions.html +doc/orm/backref.html +doc/orm/basic_relationships.html +doc/orm/cascades.html +doc/orm/classical.html +doc/orm/collections.html +doc/orm/composites.html +doc/orm/constructors.html +doc/orm/contextual.html +doc/orm/deprecated.html +doc/orm/events.html +doc/orm/examples.html +doc/orm/exceptions.html +doc/orm/extending.html +doc/orm/index.html +doc/orm/inheritance.html +doc/orm/inheritance_loading.html +doc/orm/internals.html +doc/orm/join_conditions.html +doc/orm/loading.html +doc/orm/loading_columns.html +doc/orm/loading_objects.html +doc/orm/loading_relationships.html +doc/orm/mapped_attributes.html +doc/orm/mapped_sql_expr.html +doc/orm/mapper_config.html +doc/orm/mapping_api.html +doc/orm/mapping_columns.html +doc/orm/mapping_styles.html +doc/orm/nonstandard_mappings.html +doc/orm/persistence_techniques.html +doc/orm/query.html +doc/orm/relationship_api.html +doc/orm/relationship_persistence.html +doc/orm/relationships.html +doc/orm/scalar_mapping.html +doc/orm/self_referential.html +doc/orm/session.html +doc/orm/session_api.html +doc/orm/session_basics.html +doc/orm/session_events.html +doc/orm/session_state_management.html +doc/orm/session_transaction.html +doc/orm/tutorial.html +doc/orm/versioning.html 
+doc/orm/extensions/associationproxy.html +doc/orm/extensions/automap.html +doc/orm/extensions/baked.html +doc/orm/extensions/horizontal_shard.html +doc/orm/extensions/hybrid.html +doc/orm/extensions/index.html +doc/orm/extensions/indexable.html +doc/orm/extensions/instrumentation.html +doc/orm/extensions/mutable.html +doc/orm/extensions/orderinglist.html +doc/orm/extensions/declarative/api.html +doc/orm/extensions/declarative/basic_use.html +doc/orm/extensions/declarative/index.html +doc/orm/extensions/declarative/inheritance.html +doc/orm/extensions/declarative/mixins.html +doc/orm/extensions/declarative/relationships.html +doc/orm/extensions/declarative/table_config.html +examples/__init__.py +examples/adjacency_list/__init__.py +examples/adjacency_list/adjacency_list.py +examples/association/__init__.py +examples/association/basic_association.py +examples/association/dict_of_sets_with_default.py +examples/association/proxied_association.py +examples/custom_attributes/__init__.py +examples/custom_attributes/active_column_defaults.py +examples/custom_attributes/custom_management.py +examples/custom_attributes/listen_for_events.py +examples/dogpile_caching/__init__.py +examples/dogpile_caching/advanced.py +examples/dogpile_caching/caching_query.py +examples/dogpile_caching/environment.py +examples/dogpile_caching/fixture_data.py +examples/dogpile_caching/helloworld.py +examples/dogpile_caching/local_session_caching.py +examples/dogpile_caching/model.py +examples/dogpile_caching/relationship_caching.py +examples/dynamic_dict/__init__.py +examples/dynamic_dict/dynamic_dict.py +examples/elementtree/__init__.py +examples/elementtree/adjacency_list.py +examples/elementtree/optimized_al.py +examples/elementtree/pickle_type.py +examples/elementtree/test.xml +examples/elementtree/test2.xml +examples/elementtree/test3.xml +examples/generic_associations/__init__.py +examples/generic_associations/discriminator_on_association.py +examples/generic_associations/generic_fk.py +examples/generic_associations/table_per_association.py +examples/generic_associations/table_per_related.py +examples/graphs/__init__.py +examples/graphs/directed_graph.py +examples/inheritance/__init__.py +examples/inheritance/concrete.py +examples/inheritance/joined.py +examples/inheritance/single.py +examples/join_conditions/__init__.py +examples/join_conditions/cast.py +examples/join_conditions/threeway.py +examples/large_collection/__init__.py +examples/large_collection/large_collection.py +examples/materialized_paths/__init__.py +examples/materialized_paths/materialized_paths.py +examples/nested_sets/__init__.py +examples/nested_sets/nested_sets.py +examples/performance/__init__.py +examples/performance/__main__.py +examples/performance/bulk_inserts.py +examples/performance/bulk_updates.py +examples/performance/large_resultsets.py +examples/performance/short_selects.py +examples/performance/single_inserts.py +examples/postgis/__init__.py +examples/postgis/postgis.py +examples/sharding/__init__.py +examples/sharding/attribute_shard.py +examples/space_invaders/__init__.py +examples/space_invaders/space_invaders.py +examples/versioned_history/__init__.py +examples/versioned_history/history_meta.py +examples/versioned_history/test_versioning.py +examples/versioned_rows/__init__.py +examples/versioned_rows/versioned_map.py +examples/versioned_rows/versioned_rows.py +examples/versioned_rows/versioned_rows_w_versionid.py +examples/versioned_rows/versioned_update_old_row.py +examples/vertical/__init__.py 
+examples/vertical/dictlike-polymorphic.py +examples/vertical/dictlike.py +lib/SQLAlchemy.egg-info/PKG-INFO +lib/SQLAlchemy.egg-info/SOURCES.txt +lib/SQLAlchemy.egg-info/dependency_links.txt +lib/SQLAlchemy.egg-info/requires.txt +lib/SQLAlchemy.egg-info/top_level.txt +lib/sqlalchemy/__init__.py +lib/sqlalchemy/events.py +lib/sqlalchemy/exc.py +lib/sqlalchemy/inspection.py +lib/sqlalchemy/interfaces.py +lib/sqlalchemy/log.py +lib/sqlalchemy/processors.py +lib/sqlalchemy/schema.py +lib/sqlalchemy/types.py +lib/sqlalchemy/cextension/processors.c +lib/sqlalchemy/cextension/resultproxy.c +lib/sqlalchemy/cextension/utils.c +lib/sqlalchemy/connectors/__init__.py +lib/sqlalchemy/connectors/mxodbc.py +lib/sqlalchemy/connectors/pyodbc.py +lib/sqlalchemy/connectors/zxJDBC.py +lib/sqlalchemy/databases/__init__.py +lib/sqlalchemy/dialects/__init__.py +lib/sqlalchemy/dialects/type_migration_guidelines.txt +lib/sqlalchemy/dialects/firebird/__init__.py +lib/sqlalchemy/dialects/firebird/base.py +lib/sqlalchemy/dialects/firebird/fdb.py +lib/sqlalchemy/dialects/firebird/kinterbasdb.py +lib/sqlalchemy/dialects/mssql/__init__.py +lib/sqlalchemy/dialects/mssql/adodbapi.py +lib/sqlalchemy/dialects/mssql/base.py +lib/sqlalchemy/dialects/mssql/information_schema.py +lib/sqlalchemy/dialects/mssql/mxodbc.py +lib/sqlalchemy/dialects/mssql/pymssql.py +lib/sqlalchemy/dialects/mssql/pyodbc.py +lib/sqlalchemy/dialects/mssql/zxjdbc.py +lib/sqlalchemy/dialects/mysql/__init__.py +lib/sqlalchemy/dialects/mysql/base.py +lib/sqlalchemy/dialects/mysql/cymysql.py +lib/sqlalchemy/dialects/mysql/dml.py +lib/sqlalchemy/dialects/mysql/enumerated.py +lib/sqlalchemy/dialects/mysql/gaerdbms.py +lib/sqlalchemy/dialects/mysql/json.py +lib/sqlalchemy/dialects/mysql/mysqlconnector.py +lib/sqlalchemy/dialects/mysql/mysqldb.py +lib/sqlalchemy/dialects/mysql/oursql.py +lib/sqlalchemy/dialects/mysql/pymysql.py +lib/sqlalchemy/dialects/mysql/pyodbc.py +lib/sqlalchemy/dialects/mysql/reflection.py +lib/sqlalchemy/dialects/mysql/types.py +lib/sqlalchemy/dialects/mysql/zxjdbc.py +lib/sqlalchemy/dialects/oracle/__init__.py +lib/sqlalchemy/dialects/oracle/base.py +lib/sqlalchemy/dialects/oracle/cx_oracle.py +lib/sqlalchemy/dialects/oracle/zxjdbc.py +lib/sqlalchemy/dialects/postgresql/__init__.py +lib/sqlalchemy/dialects/postgresql/array.py +lib/sqlalchemy/dialects/postgresql/base.py +lib/sqlalchemy/dialects/postgresql/dml.py +lib/sqlalchemy/dialects/postgresql/ext.py +lib/sqlalchemy/dialects/postgresql/hstore.py +lib/sqlalchemy/dialects/postgresql/json.py +lib/sqlalchemy/dialects/postgresql/pg8000.py +lib/sqlalchemy/dialects/postgresql/psycopg2.py +lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py +lib/sqlalchemy/dialects/postgresql/pygresql.py +lib/sqlalchemy/dialects/postgresql/pypostgresql.py +lib/sqlalchemy/dialects/postgresql/ranges.py +lib/sqlalchemy/dialects/postgresql/zxjdbc.py +lib/sqlalchemy/dialects/sqlite/__init__.py +lib/sqlalchemy/dialects/sqlite/base.py +lib/sqlalchemy/dialects/sqlite/json.py +lib/sqlalchemy/dialects/sqlite/pysqlcipher.py +lib/sqlalchemy/dialects/sqlite/pysqlite.py +lib/sqlalchemy/dialects/sybase/__init__.py +lib/sqlalchemy/dialects/sybase/base.py +lib/sqlalchemy/dialects/sybase/mxodbc.py +lib/sqlalchemy/dialects/sybase/pyodbc.py +lib/sqlalchemy/dialects/sybase/pysybase.py +lib/sqlalchemy/engine/__init__.py +lib/sqlalchemy/engine/base.py +lib/sqlalchemy/engine/default.py +lib/sqlalchemy/engine/interfaces.py +lib/sqlalchemy/engine/reflection.py +lib/sqlalchemy/engine/result.py +lib/sqlalchemy/engine/strategies.py 
+lib/sqlalchemy/engine/threadlocal.py +lib/sqlalchemy/engine/url.py +lib/sqlalchemy/engine/util.py +lib/sqlalchemy/event/__init__.py +lib/sqlalchemy/event/api.py +lib/sqlalchemy/event/attr.py +lib/sqlalchemy/event/base.py +lib/sqlalchemy/event/legacy.py +lib/sqlalchemy/event/registry.py +lib/sqlalchemy/ext/__init__.py +lib/sqlalchemy/ext/associationproxy.py +lib/sqlalchemy/ext/automap.py +lib/sqlalchemy/ext/baked.py +lib/sqlalchemy/ext/compiler.py +lib/sqlalchemy/ext/horizontal_shard.py +lib/sqlalchemy/ext/hybrid.py +lib/sqlalchemy/ext/indexable.py +lib/sqlalchemy/ext/instrumentation.py +lib/sqlalchemy/ext/mutable.py +lib/sqlalchemy/ext/orderinglist.py +lib/sqlalchemy/ext/serializer.py +lib/sqlalchemy/ext/declarative/__init__.py +lib/sqlalchemy/ext/declarative/api.py +lib/sqlalchemy/ext/declarative/base.py +lib/sqlalchemy/ext/declarative/clsregistry.py +lib/sqlalchemy/orm/__init__.py +lib/sqlalchemy/orm/attributes.py +lib/sqlalchemy/orm/base.py +lib/sqlalchemy/orm/collections.py +lib/sqlalchemy/orm/dependency.py +lib/sqlalchemy/orm/deprecated_interfaces.py +lib/sqlalchemy/orm/descriptor_props.py +lib/sqlalchemy/orm/dynamic.py +lib/sqlalchemy/orm/evaluator.py +lib/sqlalchemy/orm/events.py +lib/sqlalchemy/orm/exc.py +lib/sqlalchemy/orm/identity.py +lib/sqlalchemy/orm/instrumentation.py +lib/sqlalchemy/orm/interfaces.py +lib/sqlalchemy/orm/loading.py +lib/sqlalchemy/orm/mapper.py +lib/sqlalchemy/orm/path_registry.py +lib/sqlalchemy/orm/persistence.py +lib/sqlalchemy/orm/properties.py +lib/sqlalchemy/orm/query.py +lib/sqlalchemy/orm/relationships.py +lib/sqlalchemy/orm/scoping.py +lib/sqlalchemy/orm/session.py +lib/sqlalchemy/orm/state.py +lib/sqlalchemy/orm/strategies.py +lib/sqlalchemy/orm/strategy_options.py +lib/sqlalchemy/orm/sync.py +lib/sqlalchemy/orm/unitofwork.py +lib/sqlalchemy/orm/util.py +lib/sqlalchemy/pool/__init__.py +lib/sqlalchemy/pool/base.py +lib/sqlalchemy/pool/dbapi_proxy.py +lib/sqlalchemy/pool/impl.py +lib/sqlalchemy/sql/__init__.py +lib/sqlalchemy/sql/annotation.py +lib/sqlalchemy/sql/base.py +lib/sqlalchemy/sql/compiler.py +lib/sqlalchemy/sql/crud.py +lib/sqlalchemy/sql/ddl.py +lib/sqlalchemy/sql/default_comparator.py +lib/sqlalchemy/sql/dml.py +lib/sqlalchemy/sql/elements.py +lib/sqlalchemy/sql/expression.py +lib/sqlalchemy/sql/functions.py +lib/sqlalchemy/sql/naming.py +lib/sqlalchemy/sql/operators.py +lib/sqlalchemy/sql/schema.py +lib/sqlalchemy/sql/selectable.py +lib/sqlalchemy/sql/sqltypes.py +lib/sqlalchemy/sql/type_api.py +lib/sqlalchemy/sql/util.py +lib/sqlalchemy/sql/visitors.py +lib/sqlalchemy/testing/__init__.py +lib/sqlalchemy/testing/assertions.py +lib/sqlalchemy/testing/assertsql.py +lib/sqlalchemy/testing/config.py +lib/sqlalchemy/testing/engines.py +lib/sqlalchemy/testing/entities.py +lib/sqlalchemy/testing/exclusions.py +lib/sqlalchemy/testing/fixtures.py +lib/sqlalchemy/testing/mock.py +lib/sqlalchemy/testing/pickleable.py +lib/sqlalchemy/testing/profiling.py +lib/sqlalchemy/testing/provision.py +lib/sqlalchemy/testing/replay_fixture.py +lib/sqlalchemy/testing/requirements.py +lib/sqlalchemy/testing/schema.py +lib/sqlalchemy/testing/util.py +lib/sqlalchemy/testing/warnings.py +lib/sqlalchemy/testing/plugin/__init__.py +lib/sqlalchemy/testing/plugin/bootstrap.py +lib/sqlalchemy/testing/plugin/plugin_base.py +lib/sqlalchemy/testing/plugin/pytestplugin.py +lib/sqlalchemy/testing/suite/__init__.py +lib/sqlalchemy/testing/suite/test_cte.py +lib/sqlalchemy/testing/suite/test_ddl.py +lib/sqlalchemy/testing/suite/test_dialect.py 
+lib/sqlalchemy/testing/suite/test_insert.py +lib/sqlalchemy/testing/suite/test_reflection.py +lib/sqlalchemy/testing/suite/test_results.py +lib/sqlalchemy/testing/suite/test_select.py +lib/sqlalchemy/testing/suite/test_sequence.py +lib/sqlalchemy/testing/suite/test_types.py +lib/sqlalchemy/testing/suite/test_update_delete.py +lib/sqlalchemy/util/__init__.py +lib/sqlalchemy/util/_collections.py +lib/sqlalchemy/util/compat.py +lib/sqlalchemy/util/deprecations.py +lib/sqlalchemy/util/langhelpers.py +lib/sqlalchemy/util/queue.py +lib/sqlalchemy/util/topological.py +test/__init__.py +test/binary_data_one.dat +test/binary_data_two.dat +test/conftest.py +test/requirements.py +test/aaa_profiling/__init__.py +test/aaa_profiling/test_compiler.py +test/aaa_profiling/test_memusage.py +test/aaa_profiling/test_misc.py +test/aaa_profiling/test_orm.py +test/aaa_profiling/test_pool.py +test/aaa_profiling/test_resultset.py +test/aaa_profiling/test_zoomark.py +test/aaa_profiling/test_zoomark_orm.py +test/base/__init__.py +test/base/test_dependency.py +test/base/test_events.py +test/base/test_except.py +test/base/test_inspect.py +test/base/test_tutorials.py +test/base/test_utils.py +test/dialect/__init__.py +test/dialect/test_all.py +test/dialect/test_firebird.py +test/dialect/test_mxodbc.py +test/dialect/test_pyodbc.py +test/dialect/test_sqlite.py +test/dialect/test_suite.py +test/dialect/test_sybase.py +test/dialect/mssql/__init__.py +test/dialect/mssql/test_compiler.py +test/dialect/mssql/test_engine.py +test/dialect/mssql/test_query.py +test/dialect/mssql/test_reflection.py +test/dialect/mssql/test_types.py +test/dialect/mysql/__init__.py +test/dialect/mysql/test_compiler.py +test/dialect/mysql/test_dialect.py +test/dialect/mysql/test_for_update.py +test/dialect/mysql/test_on_duplicate.py +test/dialect/mysql/test_query.py +test/dialect/mysql/test_reflection.py +test/dialect/mysql/test_types.py +test/dialect/oracle/__init__.py +test/dialect/oracle/test_compiler.py +test/dialect/oracle/test_dialect.py +test/dialect/oracle/test_reflection.py +test/dialect/oracle/test_types.py +test/dialect/postgresql/__init__.py +test/dialect/postgresql/test_compiler.py +test/dialect/postgresql/test_dialect.py +test/dialect/postgresql/test_on_conflict.py +test/dialect/postgresql/test_query.py +test/dialect/postgresql/test_reflection.py +test/dialect/postgresql/test_types.py +test/engine/__init__.py +test/engine/test_bind.py +test/engine/test_ddlevents.py +test/engine/test_deprecations.py +test/engine/test_execute.py +test/engine/test_logging.py +test/engine/test_parseconnect.py +test/engine/test_pool.py +test/engine/test_processors.py +test/engine/test_reconnect.py +test/engine/test_reflection.py +test/engine/test_transaction.py +test/ext/__init__.py +test/ext/test_associationproxy.py +test/ext/test_automap.py +test/ext/test_baked.py +test/ext/test_compiler.py +test/ext/test_deprecations.py +test/ext/test_extendedattr.py +test/ext/test_horizontal_shard.py +test/ext/test_hybrid.py +test/ext/test_indexable.py +test/ext/test_mutable.py +test/ext/test_orderinglist.py +test/ext/test_serializer.py +test/ext/declarative/__init__.py +test/ext/declarative/test_basic.py +test/ext/declarative/test_clsregistry.py +test/ext/declarative/test_concurrency.py +test/ext/declarative/test_inheritance.py +test/ext/declarative/test_mixin.py +test/ext/declarative/test_reflection.py +test/orm/__init__.py +test/orm/_fixtures.py +test/orm/test_ac_relationships.py +test/orm/test_association.py +test/orm/test_assorted_eager.py 
+test/orm/test_attributes.py +test/orm/test_backref_mutations.py +test/orm/test_bind.py +test/orm/test_bulk.py +test/orm/test_bundle.py +test/orm/test_cascade.py +test/orm/test_collection.py +test/orm/test_compile.py +test/orm/test_composites.py +test/orm/test_cycles.py +test/orm/test_default_strategies.py +test/orm/test_defaults.py +test/orm/test_deferred.py +test/orm/test_deprecations.py +test/orm/test_descriptor.py +test/orm/test_dynamic.py +test/orm/test_eager_relations.py +test/orm/test_evaluator.py +test/orm/test_events.py +test/orm/test_expire.py +test/orm/test_froms.py +test/orm/test_generative.py +test/orm/test_hasparent.py +test/orm/test_immediate_load.py +test/orm/test_inspect.py +test/orm/test_instrumentation.py +test/orm/test_joins.py +test/orm/test_lazy_relations.py +test/orm/test_load_on_fks.py +test/orm/test_loading.py +test/orm/test_lockmode.py +test/orm/test_manytomany.py +test/orm/test_mapper.py +test/orm/test_merge.py +test/orm/test_naturalpks.py +test/orm/test_of_type.py +test/orm/test_onetoone.py +test/orm/test_options.py +test/orm/test_pickled.py +test/orm/test_query.py +test/orm/test_rel_fn.py +test/orm/test_relationships.py +test/orm/test_scoping.py +test/orm/test_selectable.py +test/orm/test_selectin_relations.py +test/orm/test_session.py +test/orm/test_subquery_relations.py +test/orm/test_sync.py +test/orm/test_transaction.py +test/orm/test_unitofwork.py +test/orm/test_unitofworkv2.py +test/orm/test_update_delete.py +test/orm/test_utils.py +test/orm/test_validators.py +test/orm/test_versioning.py +test/orm/inheritance/__init__.py +test/orm/inheritance/_poly_fixtures.py +test/orm/inheritance/test_abc_inheritance.py +test/orm/inheritance/test_abc_polymorphic.py +test/orm/inheritance/test_assorted_poly.py +test/orm/inheritance/test_basic.py +test/orm/inheritance/test_concrete.py +test/orm/inheritance/test_magazine.py +test/orm/inheritance/test_manytomany.py +test/orm/inheritance/test_poly_linked_list.py +test/orm/inheritance/test_poly_loading.py +test/orm/inheritance/test_poly_persistence.py +test/orm/inheritance/test_polymorphic_rel.py +test/orm/inheritance/test_productspec.py +test/orm/inheritance/test_relationship.py +test/orm/inheritance/test_selects.py +test/orm/inheritance/test_single.py +test/orm/inheritance/test_with_poly.py +test/perf/invalidate_stresstest.py +test/perf/orm2010.py +test/sql/__init__.py +test/sql/test_case_statement.py +test/sql/test_compiler.py +test/sql/test_constraints.py +test/sql/test_cte.py +test/sql/test_ddlemit.py +test/sql/test_defaults.py +test/sql/test_delete.py +test/sql/test_deprecations.py +test/sql/test_functions.py +test/sql/test_generative.py +test/sql/test_insert.py +test/sql/test_insert_exec.py +test/sql/test_inspect.py +test/sql/test_join_rewriting.py +test/sql/test_labels.py +test/sql/test_lateral.py +test/sql/test_metadata.py +test/sql/test_operators.py +test/sql/test_query.py +test/sql/test_quote.py +test/sql/test_resultset.py +test/sql/test_returning.py +test/sql/test_rowcount.py +test/sql/test_selectable.py +test/sql/test_tablesample.py +test/sql/test_text.py +test/sql/test_type_expressions.py +test/sql/test_types.py +test/sql/test_unicode.py +test/sql/test_update.py +test/sql/test_utils.py \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/dependency_links.txt b/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ 
b/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/installed-files.txt b/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/installed-files.txt new file mode 100644 index 0000000..fc5661a --- /dev/null +++ b/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/installed-files.txt @@ -0,0 +1,394 @@ +../sqlalchemy/types.py +../sqlalchemy/inspection.py +../sqlalchemy/interfaces.py +../sqlalchemy/processors.py +../sqlalchemy/events.py +../sqlalchemy/schema.py +../sqlalchemy/__init__.py +../sqlalchemy/exc.py +../sqlalchemy/log.py +../sqlalchemy/util/compat.py +../sqlalchemy/util/topological.py +../sqlalchemy/util/deprecations.py +../sqlalchemy/util/queue.py +../sqlalchemy/util/__init__.py +../sqlalchemy/util/_collections.py +../sqlalchemy/util/langhelpers.py +../sqlalchemy/event/registry.py +../sqlalchemy/event/api.py +../sqlalchemy/event/base.py +../sqlalchemy/event/attr.py +../sqlalchemy/event/__init__.py +../sqlalchemy/event/legacy.py +../sqlalchemy/testing/replay_fixture.py +../sqlalchemy/testing/fixtures.py +../sqlalchemy/testing/config.py +../sqlalchemy/testing/profiling.py +../sqlalchemy/testing/requirements.py +../sqlalchemy/testing/entities.py +../sqlalchemy/testing/assertsql.py +../sqlalchemy/testing/assertions.py +../sqlalchemy/testing/engines.py +../sqlalchemy/testing/exclusions.py +../sqlalchemy/testing/pickleable.py +../sqlalchemy/testing/schema.py +../sqlalchemy/testing/__init__.py +../sqlalchemy/testing/util.py +../sqlalchemy/testing/warnings.py +../sqlalchemy/testing/provision.py +../sqlalchemy/testing/mock.py +../sqlalchemy/connectors/pyodbc.py +../sqlalchemy/connectors/mxodbc.py +../sqlalchemy/connectors/zxJDBC.py +../sqlalchemy/connectors/__init__.py +../sqlalchemy/sql/naming.py +../sqlalchemy/sql/compiler.py +../sqlalchemy/sql/dml.py +../sqlalchemy/sql/annotation.py +../sqlalchemy/sql/visitors.py +../sqlalchemy/sql/functions.py +../sqlalchemy/sql/sqltypes.py +../sqlalchemy/sql/base.py +../sqlalchemy/sql/type_api.py +../sqlalchemy/sql/elements.py +../sqlalchemy/sql/selectable.py +../sqlalchemy/sql/crud.py +../sqlalchemy/sql/operators.py +../sqlalchemy/sql/schema.py +../sqlalchemy/sql/expression.py +../sqlalchemy/sql/__init__.py +../sqlalchemy/sql/util.py +../sqlalchemy/sql/ddl.py +../sqlalchemy/sql/default_comparator.py +../sqlalchemy/pool/impl.py +../sqlalchemy/pool/base.py +../sqlalchemy/pool/dbapi_proxy.py +../sqlalchemy/pool/__init__.py +../sqlalchemy/ext/compiler.py +../sqlalchemy/ext/indexable.py +../sqlalchemy/ext/horizontal_shard.py +../sqlalchemy/ext/automap.py +../sqlalchemy/ext/mutable.py +../sqlalchemy/ext/associationproxy.py +../sqlalchemy/ext/hybrid.py +../sqlalchemy/ext/baked.py +../sqlalchemy/ext/orderinglist.py +../sqlalchemy/ext/instrumentation.py +../sqlalchemy/ext/serializer.py +../sqlalchemy/ext/__init__.py +../sqlalchemy/databases/__init__.py +../sqlalchemy/engine/threadlocal.py +../sqlalchemy/engine/result.py +../sqlalchemy/engine/interfaces.py +../sqlalchemy/engine/strategies.py +../sqlalchemy/engine/url.py +../sqlalchemy/engine/base.py +../sqlalchemy/engine/reflection.py +../sqlalchemy/engine/__init__.py +../sqlalchemy/engine/default.py +../sqlalchemy/engine/util.py +../sqlalchemy/dialects/__init__.py +../sqlalchemy/orm/identity.py +../sqlalchemy/orm/deprecated_interfaces.py +../sqlalchemy/orm/path_registry.py +../sqlalchemy/orm/mapper.py +../sqlalchemy/orm/session.py +../sqlalchemy/orm/relationships.py 
+../sqlalchemy/orm/collections.py +../sqlalchemy/orm/strategy_options.py +../sqlalchemy/orm/interfaces.py +../sqlalchemy/orm/loading.py +../sqlalchemy/orm/query.py +../sqlalchemy/orm/dynamic.py +../sqlalchemy/orm/strategies.py +../sqlalchemy/orm/attributes.py +../sqlalchemy/orm/events.py +../sqlalchemy/orm/base.py +../sqlalchemy/orm/sync.py +../sqlalchemy/orm/properties.py +../sqlalchemy/orm/instrumentation.py +../sqlalchemy/orm/state.py +../sqlalchemy/orm/persistence.py +../sqlalchemy/orm/scoping.py +../sqlalchemy/orm/dependency.py +../sqlalchemy/orm/descriptor_props.py +../sqlalchemy/orm/evaluator.py +../sqlalchemy/orm/__init__.py +../sqlalchemy/orm/util.py +../sqlalchemy/orm/exc.py +../sqlalchemy/orm/unitofwork.py +../sqlalchemy/testing/plugin/bootstrap.py +../sqlalchemy/testing/plugin/pytestplugin.py +../sqlalchemy/testing/plugin/__init__.py +../sqlalchemy/testing/plugin/plugin_base.py +../sqlalchemy/testing/suite/test_cte.py +../sqlalchemy/testing/suite/test_select.py +../sqlalchemy/testing/suite/test_ddl.py +../sqlalchemy/testing/suite/test_results.py +../sqlalchemy/testing/suite/test_dialect.py +../sqlalchemy/testing/suite/test_insert.py +../sqlalchemy/testing/suite/test_update_delete.py +../sqlalchemy/testing/suite/test_types.py +../sqlalchemy/testing/suite/test_sequence.py +../sqlalchemy/testing/suite/test_reflection.py +../sqlalchemy/testing/suite/__init__.py +../sqlalchemy/ext/declarative/clsregistry.py +../sqlalchemy/ext/declarative/api.py +../sqlalchemy/ext/declarative/base.py +../sqlalchemy/ext/declarative/__init__.py +../sqlalchemy/dialects/mysql/pymysql.py +../sqlalchemy/dialects/mysql/types.py +../sqlalchemy/dialects/mysql/dml.py +../sqlalchemy/dialects/mysql/zxjdbc.py +../sqlalchemy/dialects/mysql/pyodbc.py +../sqlalchemy/dialects/mysql/oursql.py +../sqlalchemy/dialects/mysql/mysqldb.py +../sqlalchemy/dialects/mysql/json.py +../sqlalchemy/dialects/mysql/enumerated.py +../sqlalchemy/dialects/mysql/base.py +../sqlalchemy/dialects/mysql/reflection.py +../sqlalchemy/dialects/mysql/mysqlconnector.py +../sqlalchemy/dialects/mysql/gaerdbms.py +../sqlalchemy/dialects/mysql/__init__.py +../sqlalchemy/dialects/mysql/cymysql.py +../sqlalchemy/dialects/postgresql/psycopg2.py +../sqlalchemy/dialects/postgresql/dml.py +../sqlalchemy/dialects/postgresql/zxjdbc.py +../sqlalchemy/dialects/postgresql/array.py +../sqlalchemy/dialects/postgresql/pypostgresql.py +../sqlalchemy/dialects/postgresql/json.py +../sqlalchemy/dialects/postgresql/psycopg2cffi.py +../sqlalchemy/dialects/postgresql/pygresql.py +../sqlalchemy/dialects/postgresql/ranges.py +../sqlalchemy/dialects/postgresql/base.py +../sqlalchemy/dialects/postgresql/hstore.py +../sqlalchemy/dialects/postgresql/__init__.py +../sqlalchemy/dialects/postgresql/pg8000.py +../sqlalchemy/dialects/postgresql/ext.py +../sqlalchemy/dialects/mssql/zxjdbc.py +../sqlalchemy/dialects/mssql/pyodbc.py +../sqlalchemy/dialects/mssql/information_schema.py +../sqlalchemy/dialects/mssql/mxodbc.py +../sqlalchemy/dialects/mssql/base.py +../sqlalchemy/dialects/mssql/pymssql.py +../sqlalchemy/dialects/mssql/adodbapi.py +../sqlalchemy/dialects/mssql/__init__.py +../sqlalchemy/dialects/sybase/pyodbc.py +../sqlalchemy/dialects/sybase/mxodbc.py +../sqlalchemy/dialects/sybase/pysybase.py +../sqlalchemy/dialects/sybase/base.py +../sqlalchemy/dialects/sybase/__init__.py +../sqlalchemy/dialects/oracle/zxjdbc.py +../sqlalchemy/dialects/oracle/base.py +../sqlalchemy/dialects/oracle/__init__.py +../sqlalchemy/dialects/oracle/cx_oracle.py 
+../sqlalchemy/dialects/sqlite/pysqlite.py +../sqlalchemy/dialects/sqlite/json.py +../sqlalchemy/dialects/sqlite/base.py +../sqlalchemy/dialects/sqlite/pysqlcipher.py +../sqlalchemy/dialects/sqlite/__init__.py +../sqlalchemy/dialects/firebird/kinterbasdb.py +../sqlalchemy/dialects/firebird/base.py +../sqlalchemy/dialects/firebird/fdb.py +../sqlalchemy/dialects/firebird/__init__.py +../sqlalchemy/__pycache__/types.cpython-36.pyc +../sqlalchemy/__pycache__/inspection.cpython-36.pyc +../sqlalchemy/__pycache__/interfaces.cpython-36.pyc +../sqlalchemy/__pycache__/processors.cpython-36.pyc +../sqlalchemy/__pycache__/events.cpython-36.pyc +../sqlalchemy/__pycache__/schema.cpython-36.pyc +../sqlalchemy/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/__pycache__/exc.cpython-36.pyc +../sqlalchemy/__pycache__/log.cpython-36.pyc +../sqlalchemy/util/__pycache__/compat.cpython-36.pyc +../sqlalchemy/util/__pycache__/topological.cpython-36.pyc +../sqlalchemy/util/__pycache__/deprecations.cpython-36.pyc +../sqlalchemy/util/__pycache__/queue.cpython-36.pyc +../sqlalchemy/util/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/util/__pycache__/_collections.cpython-36.pyc +../sqlalchemy/util/__pycache__/langhelpers.cpython-36.pyc +../sqlalchemy/event/__pycache__/registry.cpython-36.pyc +../sqlalchemy/event/__pycache__/api.cpython-36.pyc +../sqlalchemy/event/__pycache__/base.cpython-36.pyc +../sqlalchemy/event/__pycache__/attr.cpython-36.pyc +../sqlalchemy/event/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/event/__pycache__/legacy.cpython-36.pyc +../sqlalchemy/testing/__pycache__/replay_fixture.cpython-36.pyc +../sqlalchemy/testing/__pycache__/fixtures.cpython-36.pyc +../sqlalchemy/testing/__pycache__/config.cpython-36.pyc +../sqlalchemy/testing/__pycache__/profiling.cpython-36.pyc +../sqlalchemy/testing/__pycache__/requirements.cpython-36.pyc +../sqlalchemy/testing/__pycache__/entities.cpython-36.pyc +../sqlalchemy/testing/__pycache__/assertsql.cpython-36.pyc +../sqlalchemy/testing/__pycache__/assertions.cpython-36.pyc +../sqlalchemy/testing/__pycache__/engines.cpython-36.pyc +../sqlalchemy/testing/__pycache__/exclusions.cpython-36.pyc +../sqlalchemy/testing/__pycache__/pickleable.cpython-36.pyc +../sqlalchemy/testing/__pycache__/schema.cpython-36.pyc +../sqlalchemy/testing/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/testing/__pycache__/util.cpython-36.pyc +../sqlalchemy/testing/__pycache__/warnings.cpython-36.pyc +../sqlalchemy/testing/__pycache__/provision.cpython-36.pyc +../sqlalchemy/testing/__pycache__/mock.cpython-36.pyc +../sqlalchemy/connectors/__pycache__/pyodbc.cpython-36.pyc +../sqlalchemy/connectors/__pycache__/mxodbc.cpython-36.pyc +../sqlalchemy/connectors/__pycache__/zxJDBC.cpython-36.pyc +../sqlalchemy/connectors/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/sql/__pycache__/naming.cpython-36.pyc +../sqlalchemy/sql/__pycache__/compiler.cpython-36.pyc +../sqlalchemy/sql/__pycache__/dml.cpython-36.pyc +../sqlalchemy/sql/__pycache__/annotation.cpython-36.pyc +../sqlalchemy/sql/__pycache__/visitors.cpython-36.pyc +../sqlalchemy/sql/__pycache__/functions.cpython-36.pyc +../sqlalchemy/sql/__pycache__/sqltypes.cpython-36.pyc +../sqlalchemy/sql/__pycache__/base.cpython-36.pyc +../sqlalchemy/sql/__pycache__/type_api.cpython-36.pyc +../sqlalchemy/sql/__pycache__/elements.cpython-36.pyc +../sqlalchemy/sql/__pycache__/selectable.cpython-36.pyc +../sqlalchemy/sql/__pycache__/crud.cpython-36.pyc +../sqlalchemy/sql/__pycache__/operators.cpython-36.pyc 
+../sqlalchemy/sql/__pycache__/schema.cpython-36.pyc +../sqlalchemy/sql/__pycache__/expression.cpython-36.pyc +../sqlalchemy/sql/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/sql/__pycache__/util.cpython-36.pyc +../sqlalchemy/sql/__pycache__/ddl.cpython-36.pyc +../sqlalchemy/sql/__pycache__/default_comparator.cpython-36.pyc +../sqlalchemy/pool/__pycache__/impl.cpython-36.pyc +../sqlalchemy/pool/__pycache__/base.cpython-36.pyc +../sqlalchemy/pool/__pycache__/dbapi_proxy.cpython-36.pyc +../sqlalchemy/pool/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/ext/__pycache__/compiler.cpython-36.pyc +../sqlalchemy/ext/__pycache__/indexable.cpython-36.pyc +../sqlalchemy/ext/__pycache__/horizontal_shard.cpython-36.pyc +../sqlalchemy/ext/__pycache__/automap.cpython-36.pyc +../sqlalchemy/ext/__pycache__/mutable.cpython-36.pyc +../sqlalchemy/ext/__pycache__/associationproxy.cpython-36.pyc +../sqlalchemy/ext/__pycache__/hybrid.cpython-36.pyc +../sqlalchemy/ext/__pycache__/baked.cpython-36.pyc +../sqlalchemy/ext/__pycache__/orderinglist.cpython-36.pyc +../sqlalchemy/ext/__pycache__/instrumentation.cpython-36.pyc +../sqlalchemy/ext/__pycache__/serializer.cpython-36.pyc +../sqlalchemy/ext/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/databases/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/engine/__pycache__/threadlocal.cpython-36.pyc +../sqlalchemy/engine/__pycache__/result.cpython-36.pyc +../sqlalchemy/engine/__pycache__/interfaces.cpython-36.pyc +../sqlalchemy/engine/__pycache__/strategies.cpython-36.pyc +../sqlalchemy/engine/__pycache__/url.cpython-36.pyc +../sqlalchemy/engine/__pycache__/base.cpython-36.pyc +../sqlalchemy/engine/__pycache__/reflection.cpython-36.pyc +../sqlalchemy/engine/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/engine/__pycache__/default.cpython-36.pyc +../sqlalchemy/engine/__pycache__/util.cpython-36.pyc +../sqlalchemy/dialects/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/orm/__pycache__/identity.cpython-36.pyc +../sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-36.pyc +../sqlalchemy/orm/__pycache__/path_registry.cpython-36.pyc +../sqlalchemy/orm/__pycache__/mapper.cpython-36.pyc +../sqlalchemy/orm/__pycache__/session.cpython-36.pyc +../sqlalchemy/orm/__pycache__/relationships.cpython-36.pyc +../sqlalchemy/orm/__pycache__/collections.cpython-36.pyc +../sqlalchemy/orm/__pycache__/strategy_options.cpython-36.pyc +../sqlalchemy/orm/__pycache__/interfaces.cpython-36.pyc +../sqlalchemy/orm/__pycache__/loading.cpython-36.pyc +../sqlalchemy/orm/__pycache__/query.cpython-36.pyc +../sqlalchemy/orm/__pycache__/dynamic.cpython-36.pyc +../sqlalchemy/orm/__pycache__/strategies.cpython-36.pyc +../sqlalchemy/orm/__pycache__/attributes.cpython-36.pyc +../sqlalchemy/orm/__pycache__/events.cpython-36.pyc +../sqlalchemy/orm/__pycache__/base.cpython-36.pyc +../sqlalchemy/orm/__pycache__/sync.cpython-36.pyc +../sqlalchemy/orm/__pycache__/properties.cpython-36.pyc +../sqlalchemy/orm/__pycache__/instrumentation.cpython-36.pyc +../sqlalchemy/orm/__pycache__/state.cpython-36.pyc +../sqlalchemy/orm/__pycache__/persistence.cpython-36.pyc +../sqlalchemy/orm/__pycache__/scoping.cpython-36.pyc +../sqlalchemy/orm/__pycache__/dependency.cpython-36.pyc +../sqlalchemy/orm/__pycache__/descriptor_props.cpython-36.pyc +../sqlalchemy/orm/__pycache__/evaluator.cpython-36.pyc +../sqlalchemy/orm/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/orm/__pycache__/util.cpython-36.pyc +../sqlalchemy/orm/__pycache__/exc.cpython-36.pyc 
+../sqlalchemy/orm/__pycache__/unitofwork.cpython-36.pyc +../sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-36.pyc +../sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-36.pyc +../sqlalchemy/testing/plugin/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-36.pyc +../sqlalchemy/testing/suite/__pycache__/test_cte.cpython-36.pyc +../sqlalchemy/testing/suite/__pycache__/test_select.cpython-36.pyc +../sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-36.pyc +../sqlalchemy/testing/suite/__pycache__/test_results.cpython-36.pyc +../sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-36.pyc +../sqlalchemy/testing/suite/__pycache__/test_insert.cpython-36.pyc +../sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-36.pyc +../sqlalchemy/testing/suite/__pycache__/test_types.cpython-36.pyc +../sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-36.pyc +../sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-36.pyc +../sqlalchemy/testing/suite/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-36.pyc +../sqlalchemy/ext/declarative/__pycache__/api.cpython-36.pyc +../sqlalchemy/ext/declarative/__pycache__/base.cpython-36.pyc +../sqlalchemy/ext/declarative/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/types.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/dml.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/json.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/base.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/array.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/json.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/pygresql.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/base.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-36.pyc +../sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-36.pyc +../sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-36.pyc +../sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-36.pyc +../sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-36.pyc 
+../sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-36.pyc +../sqlalchemy/dialects/mssql/__pycache__/base.cpython-36.pyc +../sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-36.pyc +../sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-36.pyc +../sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-36.pyc +../sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-36.pyc +../sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-36.pyc +../sqlalchemy/dialects/sybase/__pycache__/base.cpython-36.pyc +../sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-36.pyc +../sqlalchemy/dialects/oracle/__pycache__/base.cpython-36.pyc +../sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-36.pyc +../sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-36.pyc +../sqlalchemy/dialects/sqlite/__pycache__/json.cpython-36.pyc +../sqlalchemy/dialects/sqlite/__pycache__/base.cpython-36.pyc +../sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-36.pyc +../sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-36.pyc +../sqlalchemy/dialects/firebird/__pycache__/base.cpython-36.pyc +../sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-36.pyc +../sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-36.pyc +../sqlalchemy/cprocessors.cpython-36m-x86_64-linux-gnu.so +../sqlalchemy/cresultproxy.cpython-36m-x86_64-linux-gnu.so +../sqlalchemy/cutils.cpython-36m-x86_64-linux-gnu.so +PKG-INFO +requires.txt +top_level.txt +SOURCES.txt +dependency_links.txt diff --git a/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/requires.txt b/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/requires.txt new file mode 100644 index 0000000..7d73cfc --- /dev/null +++ b/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/requires.txt @@ -0,0 +1,30 @@ + +[mssql] +pyodbc + +[mssql_pymssql] +pymssql + +[mssql_pyodbc] +pyodbc + +[mysql] +mysqlclient + +[oracle] +cx_oracle + +[postgresql] +psycopg2 + +[postgresql_pg8000] +pg8000 + +[postgresql_psycopg2binary] +psycopg2-binary + +[postgresql_psycopg2cffi] +psycopg2cffi + +[pymysql] +pymysql diff --git a/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/top_level.txt b/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/top_level.txt new file mode 100644 index 0000000..39fb2be --- /dev/null +++ b/py3/lib/python3.6/site-packages/SQLAlchemy-1.3.7.egg-info/top_level.txt @@ -0,0 +1 @@ +sqlalchemy diff --git a/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/METADATA b/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/METADATA new file mode 100644 index 0000000..8a77324 --- /dev/null +++ b/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/METADATA @@ -0,0 +1,101 @@ +Metadata-Version: 2.1 +Name: WTForms +Version: 2.2.1 +Summary: A flexible forms validation and rendering library for Python web development. 
+Home-page: https://wtforms.readthedocs.io/ +Author: Thomas Johansson, James Crasta +Author-email: wtforms@simplecodes.com +Maintainer: WTForms team +Maintainer-email: davidism@gmail.com +License: BSD +Project-URL: Documentation, https://wtforms.readthedocs.io/ +Project-URL: Code, https://github.com/wtforms/wtforms +Project-URL: Issue tracker, https://github.com/wtforms/wtforms/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Provides-Extra: locale +Requires-Dist: ordereddict; python_version=="2.6" +Provides-Extra: locale +Requires-Dist: Babel (>=1.3); extra == 'locale' + +WTForms +======= + +WTForms is a flexible forms validation and rendering library for Python +web development. It is `framework agnostic`_ and can work with whatever +web framework and template engine you choose. There are various +community libraries that provide closer integration with popular +frameworks. + +To get started using WTForms, we recommend reading the `crash course`_ +in the docs. + +.. _crash course: https://wtforms.readthedocs.io/en/stable/crash_course.html +.. _framework agnostic: https://wtforms.readthedocs.io/en/stable/faq.html#does-wtforms-work-with-library-here + + +Installation +------------ + +Install and update using pip:: + + pip install -U WTForms + + +Third-Party Library Integrations +-------------------------------- + +WTForms is designed to work with any web framework and template engine. +There are a number of community-provided libraries that make integrating +with frameworks even better. + +- `Flask-WTF`_ integrates with the Flask framework. It can + automatically load data from the request, uses Flask-Babel to + translate based on user-selected locale, provides full-application + CSRF, and more. +- `WTForms-Alchemy`_ provides rich support for generating forms from + SQLAlchemy models, including an expanded set of fields and + validators. +- `WTForms-SQLAlchemy`_ provides ORM-backed fields and form generation + from SQLAlchemy models. +- `WTForms-AppEngine`_ provides ORM-backed fields and form generation + from AppEngine db/ndb schema +- `WTForms-Django`_ provides ORM-backed fields and form generation + from Django models, as well as integration with Django's I18N + support. + +.. _Flask-WTF: https://flask-wtf.readthedocs.io/ +.. _WTForms-Alchemy: https://wtforms-alchemy.readthedocs.io/ +.. _WTForms-SQLAlchemy: https://github.com/wtforms/wtforms-sqlalchemy +.. _WTForms-AppEngine: https://github.com/wtforms/wtforms-appengine +..
_WTForms-Django: https://github.com/wtforms/wtforms-django + + +Links +----- + +- Documentation: https://wtforms.readthedocs.io/ +- License: `BSD `_ +- Releases: https://pypi.org/project/WTForms/ +- Code: https://github.com/wtforms/wtforms +- Issue tracker: https://github.com/wtforms/wtforms/issues +- Test status: + + - Linux: https://travis-ci.org/wtforms/wtforms + +- Test coverage: https://coveralls.io/github/wtforms/wtforms + + diff --git a/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/RECORD b/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/RECORD new file mode 100644 index 0000000..c7865c2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/RECORD @@ -0,0 +1,147 @@ +WTForms-2.2.1.dist-info/METADATA,sha256=Aqv5s_FPo1o3VxjnX-nclKn2dBPIVOpTwggPPH-DJs0,3771 +WTForms-2.2.1.dist-info/RECORD,, +WTForms-2.2.1.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 +WTForms-2.2.1.dist-info/top_level.txt,sha256=k5K62RAEkLEN23p118t3tRgvL6I_k56NiIU7Hk8Phv8,8 +wtforms/__init__.py,sha256=h4gmUHtk1Y9cGJ-l63rhrp-nC9REGdpcRPBGoJKP9hk,380 +wtforms/compat.py,sha256=buY-q7yLNO-2OlxA5QPAcdBO8urjZTtxvFnxg_1Euuo,589 +wtforms/form.py,sha256=ahME3_8CmTuvVsatV-AKqinBkOSEnLOE_nMeQLgrQEA,11608 +wtforms/i18n.py,sha256=RuMPdvfsxHGMqKySUy4DpMfEAzruPK_7gHe6GQTrekc,2175 +wtforms/meta.py,sha256=9yLQuKP4N_OiPBsPy3tBc7auldxhFryZweySDsKL8zI,3822 +wtforms/utils.py,sha256=Zg70vKv96pnHjrkSZ6KlzSo1noh20GV5IqfPy6FrOyA,1504 +wtforms/validators.py,sha256=niMtYGGRijIiZ2ruslYfRP7CTGDul_DHiR-iYen7zRg,19430 +wtforms/csrf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +wtforms/csrf/core.py,sha256=Ot8eOSAZ88qeDBlSUhRqiLfyWA13g3EFJ4zWZ7EGYnc,3157 +wtforms/csrf/session.py,sha256=baww8MJ5YObyYItXX0Vz5AjxZTdOfTqti3zsD3koka0,3056 +wtforms/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +wtforms/ext/appengine/__init__.py,sha256=xXkE1qkwzkkBw4o0YhWGZSZXcsV60DaLxX4fkxNcNe8,269 +wtforms/ext/appengine/db.py,sha256=IEJng34ztXLVSlLxneZ7M4kgGOZOPf9zR_6RTqv6Z1Q,18588 +wtforms/ext/appengine/fields.py,sha256=8Z2BJy7ft0fu_vZksneZ7xdVxdqHkWIMNjgnyfdKtho,7574 +wtforms/ext/appengine/ndb.py,sha256=szIwWA5FyD2lqZefayl__C2UsXMEAGQndqPYPhOH4Vk,17124 +wtforms/ext/csrf/__init__.py,sha256=bIQ48rbnoYrYPZkkGz04b_7PZ8leQY_CExEqYw8yitI,45 +wtforms/ext/csrf/fields.py,sha256=Ta3vLg9KQkpUTCnDF-7CP3IW11X0UqqhvL68sAopYTs,430 +wtforms/ext/csrf/form.py,sha256=ZxmvC3Um2qYeUncu6D390-W62mVQclzwPLP9_R7GedU,1785 +wtforms/ext/csrf/session.py,sha256=aKYb9_jgEmxIgvWuk0cdx9YAGTi9s3F4xy_0ibxyhbo,2627 +wtforms/ext/dateutil/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +wtforms/ext/dateutil/fields.py,sha256=RlupqB1WX_HiKJEYqi9IAxiCElxgbBDHHuXrGF4nbYs,3429 +wtforms/ext/django/__init__.py,sha256=OQ0wr3s5_cUmUU7htHXhobyxVWJS16Ve4qBK_PLs_rw,259 +wtforms/ext/django/fields.py,sha256=pEWxaAtMq5_p8QaJPOffWsX7U4LB5f8Bq8ZBw4fedxk,4580 +wtforms/ext/django/i18n.py,sha256=VLvzJ8lQOqs5Uxnhe4aOE5StGgPEvGhfBEHNrRQFtp0,626 +wtforms/ext/django/orm.py,sha256=Mme5i_o_bJTXGKkabRz03EJmGggPMejAg95XNhYtNUc,6096 +wtforms/ext/django/templatetags/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +wtforms/ext/django/templatetags/wtforms.py,sha256=iCOicSMEkixm5bcJHz35Zx0h6xVwnz1H9JglB_hU69o,2826 +wtforms/ext/i18n/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +wtforms/ext/i18n/form.py,sha256=mfsavr4LGI1GhoFLsWSuSqVPHH6QNiyqoAfY94u-XP0,1608 +wtforms/ext/i18n/utils.py,sha256=rx9-pNYjIp8DLU-VQ9XxRSXHYZuFv4ktRejzVBPTDBg,530 
+wtforms/ext/sqlalchemy/__init__.py,sha256=4U9BzeiFD_YF8pXRsTehei0ekP6jikt2bX4MN3GNT9s,431 +wtforms/ext/sqlalchemy/fields.py,sha256=XwOgJUJCcXvw-QGdF6q2w51m1CI4E_COq8GXb9blgI0,6846 +wtforms/ext/sqlalchemy/orm.py,sha256=6wJN-Zm4YB3st9xsXU5xJR5jQUsdSRqcbEZ7JvvGD9s,10671 +wtforms/fields/__init__.py,sha256=M-0pFfY9EEk-GoYzRkg3yvarM_iP_cRhPjpLEl5KgVU,219 +wtforms/fields/core.py,sha256=KevHc47k4mMJgRGe8Y07UrS_9o_nzXbn3U2HznpdMI0,34307 +wtforms/fields/html5.py,sha256=bwLHIBrEWICRcS80am_lBp6GitDCVIRvBdIWEPJeSz0,1995 +wtforms/fields/simple.py,sha256=dY7cYfb6PXMDjUefXcDeTDWpv3UGyr_BMlebJAeoRso,2218 +wtforms/locale/README.md,sha256=xL3Ain6UPZK3UdL8tMrIKwfodEsPT0IYCVDpI6do524,1062 +wtforms/locale/wtforms.pot,sha256=Sqe4LRpObVRUc30htYXgZuueKYfW7wt2lNVKtM_Jrr0,4170 +wtforms/locale/ar/LC_MESSAGES/wtforms.mo,sha256=r1DDYnBCr1hT7KwEG3NpQLR52i4j_-er5ENIVqT9Sbo,4530 +wtforms/locale/ar/LC_MESSAGES/wtforms.po,sha256=Qkhg_pS-ZEf7jEZz76mDC47UPpqWcU_8t7L88ALAPvk,6262 +wtforms/locale/bg/LC_MESSAGES/wtforms.mo,sha256=aPnglyINf0hH4FGUM3U5OJpqcJT_8XRx6GiaD4Jif3g,4297 +wtforms/locale/bg/LC_MESSAGES/wtforms.po,sha256=xflJaMOGUTNN7zbFMWL-FbMVjmj-Svmvkek84mJl5NI,6356 +wtforms/locale/ca/LC_MESSAGES/wtforms.mo,sha256=zBX48Ru44A2O82FXwC9CwzU3_FiFkUyb4KGNya4toSg,3425 +wtforms/locale/ca/LC_MESSAGES/wtforms.po,sha256=oT09ydRQNsmf0a1uwskao0wfbwQqAh2tKXjFqI_iscw,5465 +wtforms/locale/cs_CZ/LC_MESSAGES/wtforms.mo,sha256=MJQPoiMNPfdHYX5eQQ2OW7PmvQ9BFETva2qm3xmPSvo,3618 +wtforms/locale/cs_CZ/LC_MESSAGES/wtforms.po,sha256=MZ1Iv28-oX4dqzSPgGo65YU3iijeBmYBKZSGsl8YYS0,5596 +wtforms/locale/cy/LC_MESSAGES/wtforms.mo,sha256=8pJPG9dguZLej33ksWSwWmCOKIJ7VmpNVlaDMb30_lc,3371 +wtforms/locale/cy/LC_MESSAGES/wtforms.po,sha256=DTGkDUWJ1MsZqFPV8YhwHaBI1uJP6uXwiud7K3LW1yw,5415 +wtforms/locale/de/LC_MESSAGES/wtforms.mo,sha256=D4BRsJeeT_cKYagO7W1LHQ8YpwC2c7_0hbv3tDgk82E,3412 +wtforms/locale/de/LC_MESSAGES/wtforms.po,sha256=BF7F3vwQOAL_yaZTHi7x2KZnaCTzz3MNUNCtuc6e47A,5457 +wtforms/locale/de_CH/LC_MESSAGES/wtforms.mo,sha256=lBUgz2N_AlkXB4W-CxaNGuHdwhgTrYCPtwM9DWL-pP0,3418 +wtforms/locale/de_CH/LC_MESSAGES/wtforms.po,sha256=LiAqravsNbETdXHJiOi3vJD4o3hWrTRZWSHcLNvHjgc,5477 +wtforms/locale/el/LC_MESSAGES/wtforms.mo,sha256=r0_oQGB_KYBZdSmFsielQMCF0P7rgsLDCA28u37XAkw,4307 +wtforms/locale/el/LC_MESSAGES/wtforms.po,sha256=snlBcC-cjlFdpIbSG9pRGYlWFhl1EaQX72Umv2PWfp8,6345 +wtforms/locale/en/LC_MESSAGES/wtforms.mo,sha256=DCJnvT-_j_oec9za8vxn0FZSog4mm5PnaiWIpesctDE,3285 +wtforms/locale/en/LC_MESSAGES/wtforms.po,sha256=-GGpFQm9Sdz3Yg0EqltIGTEcOwnYqmepRSREkHV_UVU,5347 +wtforms/locale/es/LC_MESSAGES/wtforms.mo,sha256=U_oe-S3-i6A2VsBTVKxZ8N5QAEbpqXBlenSIaLnFupE,3394 +wtforms/locale/es/LC_MESSAGES/wtforms.po,sha256=P36kwWq3LZNjYHXTyoyMl86WziWpZYXxGFsFiqev1oU,5368 +wtforms/locale/et/LC_MESSAGES/wtforms.mo,sha256=Ugx0IpG1TJtP-DKpNZiVyo-L5F8ESrr_qCpPXR96pww,3456 +wtforms/locale/et/LC_MESSAGES/wtforms.po,sha256=doeYijsnPkyHy_JK4JRH6AQdHG8uaQTQWYwsCP6_Iuk,5497 +wtforms/locale/fa/LC_MESSAGES/wtforms.mo,sha256=exJzwjxXvOALqJhsQetN9Kcad4Lx62Exvnx2jtzja8Q,4137 +wtforms/locale/fa/LC_MESSAGES/wtforms.po,sha256=MHjVwlp-MHMV-TTUUkUYtuBdtbEjfV0jzVSgWHFv80Q,6149 +wtforms/locale/fi/LC_MESSAGES/wtforms.mo,sha256=NiodjvNOW25UkxEpuCioXdpvjbGwPoYmz0dfiMxE3S8,3416 +wtforms/locale/fi/LC_MESSAGES/wtforms.po,sha256=4uP6A6sfNoATdRR_8PlecqiiTsVzIp9qpcn9qe0jGMA,5456 +wtforms/locale/fr/LC_MESSAGES/wtforms.mo,sha256=BoZI4I1MK0-nipyLWOSG-s_55E9x9eG0WqYdz1qZ1KQ,3484 +wtforms/locale/fr/LC_MESSAGES/wtforms.po,sha256=60tb7Uyco3tdKc1Z4sdvwta46V_RGSmvXM9SdvuBvhg,5529 
+wtforms/locale/he/LC_MESSAGES/wtforms.mo,sha256=UhetGKepgOnGXa5IsjZBdOi5IbPLCufpIugkkDuXkjQ,3649 +wtforms/locale/he/LC_MESSAGES/wtforms.po,sha256=GJy7zG0ik8U0YnubNlfjjl9iPT62w3XyaAP4kNCntkQ,5657 +wtforms/locale/hu/LC_MESSAGES/wtforms.mo,sha256=Z-qEeJI422dmm7-2qJIgCuCS1eyS2pJfoavPnGK2334,3544 +wtforms/locale/hu/LC_MESSAGES/wtforms.po,sha256=eiyNXYa4_XLQWRd-j4KmAXml27cYAPjIBhjjIv9WMbE,5492 +wtforms/locale/it/LC_MESSAGES/wtforms.mo,sha256=petuqW4x1p1S69sJax15WpLQryWoDRXW0uQjr58E9Jw,3510 +wtforms/locale/it/LC_MESSAGES/wtforms.po,sha256=EuI0Plf7nLfg5NcRPqQvfg3z7fpfIdRQGBmyq1ivpGE,5556 +wtforms/locale/ja/LC_MESSAGES/wtforms.mo,sha256=thfPsxKfihz2wNvb9LA7MzYb4PnfyXT81gaE_802AlM,3736 +wtforms/locale/ja/LC_MESSAGES/wtforms.po,sha256=ydUzTwxnk8sUQcPTeS7AuU7sgArIMWgbDzxFt85mhG8,5753 +wtforms/locale/ko/LC_MESSAGES/wtforms.mo,sha256=ZRJGcizRhJifuw4rElZ6Bb-hNdH3zqCYzxhwYJisCpU,3851 +wtforms/locale/ko/LC_MESSAGES/wtforms.po,sha256=9os2sRuqxoX0fTWHr47IvBwlkY_sDoLKdn3byS7MfjQ,5842 +wtforms/locale/nb/LC_MESSAGES/wtforms.mo,sha256=0YxYTElaTGBpIurcZqZHPU2lXslt3UNF_HOw575OAKM,3337 +wtforms/locale/nb/LC_MESSAGES/wtforms.po,sha256=NXrr3nrnoOo2x2t0g8UZXT2Jm9KQnkYdnieeoB7U9Yw,5387 +wtforms/locale/nl/LC_MESSAGES/wtforms.mo,sha256=8wLTkRK82jpG5oDkqM-jLNVLYHte4fRHYF6VAN7lB6U,3350 +wtforms/locale/nl/LC_MESSAGES/wtforms.po,sha256=9xSoztymVdIgFBA2vnzaHeSK4qEGTGbiPbfwjdcHN0k,5388 +wtforms/locale/pl/LC_MESSAGES/wtforms.mo,sha256=QUs5iz_IOoo6oCVmcpWWNNkXyqYA0X01wERmQYQiXYo,3610 +wtforms/locale/pl/LC_MESSAGES/wtforms.po,sha256=XrkwltOhyLHrOOgxYVvcmR2Hcw4LUN3_sZEdJofS5Vk,5652 +wtforms/locale/pt/LC_MESSAGES/wtforms.mo,sha256=PC5HRiM-QYt4GX3eMPapzG31jLKmo3zt6nKGVb_o174,3438 +wtforms/locale/pt/LC_MESSAGES/wtforms.po,sha256=cXIZJJZ4UDDR24yrQ-XYck3klonRZd9Ajt8A7dqqJc4,5481 +wtforms/locale/ru/LC_MESSAGES/wtforms.mo,sha256=ski71qWfnwGL9GtZEQZ1fksHBeZsePxi4ZN16AlLeZE,4406 +wtforms/locale/ru/LC_MESSAGES/wtforms.po,sha256=3eeI-CxivICl6FzYpKrqfYnz2rB68hMNCicC_9aM90s,6407 +wtforms/locale/sk/LC_MESSAGES/wtforms.mo,sha256=Lo_5eGNF_LnkJsJLOde_YNWE_F3UZtScFTFlO4v-EyU,3548 +wtforms/locale/sk/LC_MESSAGES/wtforms.po,sha256=ywPpnxYnHgEkD6Ab7LJgyqgC6dIj8cBmn6hB21aS3NI,5586 +wtforms/locale/sv/LC_MESSAGES/wtforms.mo,sha256=U7noK9cso_pRgaQcvF4duRQ69joI7SHN0XcHyd0mAVg,3376 +wtforms/locale/sv/LC_MESSAGES/wtforms.po,sha256=jMtpwUlQPbi4Xiut9KNfLjGhsjqmys1Y_iGZ3lJA4NQ,5416 +wtforms/locale/tr/LC_MESSAGES/wtforms.mo,sha256=kp3J8k2FVBaXVVJJclGnUmZTEUYHS6Hg1v2baGwtReo,3391 +wtforms/locale/tr/LC_MESSAGES/wtforms.po,sha256=PFo_e3vKCMgKtkcQSaXqNOlr-YgzxvgUtg8Ju5M-8f8,5431 +wtforms/locale/uk/LC_MESSAGES/wtforms.mo,sha256=5iZS-8LmCyeteqN3TXQ15byNTGJbjpsDa8AF3zh6L1o,4451 +wtforms/locale/uk/LC_MESSAGES/wtforms.po,sha256=fIijOGm8gXO-yZkdYoX6kWMPXZE6j9yALhekfQCK5KU,6520 +wtforms/locale/zh/LC_MESSAGES/wtforms.mo,sha256=yCzjCCwAf5yu80NhllpGqlk7V6PBFyJYfoZ6IF2dQnM,3362 +wtforms/locale/zh/LC_MESSAGES/wtforms.po,sha256=ZIh59O9rnjZMRpdKFfvrk59wouOAUHyjZS0f-TMsN6U,5378 +wtforms/locale/zh_TW/LC_MESSAGES/wtforms.mo,sha256=iha5oFUQDVs7wPBpcWLLAP_Jgm42Ea9n9xIlaCsUsNE,3204 +wtforms/locale/zh_TW/LC_MESSAGES/wtforms.po,sha256=a7q2T9fdwN_xESBCD4umHMfSptN7Qt-abjO9UFRWDBo,5218 +wtforms/widgets/__init__.py,sha256=nxI0oIsofuJCNgc4Oxwzf3_q3IiCYZTSiCoEuSRZeJM,124 +wtforms/widgets/core.py,sha256=X3I5PRFbPeX1nU3DrPpsJyglsObujdN1hMxHHFTkKOk,11150 +wtforms/widgets/html5.py,sha256=LDnNegNTx-LYpw4YkbymvS2TaA2V03p2rRdYN83skYQ,2440 +WTForms-2.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +wtforms/csrf/__pycache__/session.cpython-36.pyc,, 
+wtforms/csrf/__pycache__/core.cpython-36.pyc,, +wtforms/csrf/__pycache__/__init__.cpython-36.pyc,, +wtforms/widgets/__pycache__/html5.cpython-36.pyc,, +wtforms/widgets/__pycache__/core.cpython-36.pyc,, +wtforms/widgets/__pycache__/__init__.cpython-36.pyc,, +wtforms/__pycache__/form.cpython-36.pyc,, +wtforms/__pycache__/i18n.cpython-36.pyc,, +wtforms/__pycache__/compat.cpython-36.pyc,, +wtforms/__pycache__/meta.cpython-36.pyc,, +wtforms/__pycache__/__init__.cpython-36.pyc,, +wtforms/__pycache__/utils.cpython-36.pyc,, +wtforms/__pycache__/validators.cpython-36.pyc,, +wtforms/ext/csrf/__pycache__/form.cpython-36.pyc,, +wtforms/ext/csrf/__pycache__/session.cpython-36.pyc,, +wtforms/ext/csrf/__pycache__/__init__.cpython-36.pyc,, +wtforms/ext/csrf/__pycache__/fields.cpython-36.pyc,, +wtforms/ext/sqlalchemy/__pycache__/orm.cpython-36.pyc,, +wtforms/ext/sqlalchemy/__pycache__/__init__.cpython-36.pyc,, +wtforms/ext/sqlalchemy/__pycache__/fields.cpython-36.pyc,, +wtforms/ext/django/__pycache__/orm.cpython-36.pyc,, +wtforms/ext/django/__pycache__/i18n.cpython-36.pyc,, +wtforms/ext/django/__pycache__/__init__.cpython-36.pyc,, +wtforms/ext/django/__pycache__/fields.cpython-36.pyc,, +wtforms/ext/django/templatetags/__pycache__/wtforms.cpython-36.pyc,, +wtforms/ext/django/templatetags/__pycache__/__init__.cpython-36.pyc,, +wtforms/ext/appengine/__pycache__/db.cpython-36.pyc,, +wtforms/ext/appengine/__pycache__/ndb.cpython-36.pyc,, +wtforms/ext/appengine/__pycache__/__init__.cpython-36.pyc,, +wtforms/ext/appengine/__pycache__/fields.cpython-36.pyc,, +wtforms/ext/dateutil/__pycache__/__init__.cpython-36.pyc,, +wtforms/ext/dateutil/__pycache__/fields.cpython-36.pyc,, +wtforms/ext/__pycache__/__init__.cpython-36.pyc,, +wtforms/ext/i18n/__pycache__/form.cpython-36.pyc,, +wtforms/ext/i18n/__pycache__/__init__.cpython-36.pyc,, +wtforms/ext/i18n/__pycache__/utils.cpython-36.pyc,, +wtforms/fields/__pycache__/simple.cpython-36.pyc,, +wtforms/fields/__pycache__/html5.cpython-36.pyc,, +wtforms/fields/__pycache__/core.cpython-36.pyc,, +wtforms/fields/__pycache__/__init__.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/WHEEL b/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/WHEEL new file mode 100644 index 0000000..1316c41 --- /dev/null +++ b/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/top_level.txt new file mode 100644 index 0000000..26d80fd --- /dev/null +++ b/py3/lib/python3.6/site-packages/WTForms-2.2.1.dist-info/top_level.txt @@ -0,0 +1 @@ +wtforms diff --git a/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/LICENSE.rst b/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/LICENSE.rst new file mode 100644 index 0000000..c37cae4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that 
the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/METADATA b/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/METADATA new file mode 100644 index 0000000..34ce7ea --- /dev/null +++ b/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/METADATA @@ -0,0 +1,128 @@ +Metadata-Version: 2.1 +Name: Werkzeug +Version: 0.15.5 +Summary: The comprehensive WSGI web application library. +Home-page: https://palletsprojects.com/p/werkzeug/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Documentation, https://werkzeug.palletsprojects.com/ +Project-URL: Code, https://github.com/pallets/werkzeug +Project-URL: Issue tracker, https://github.com/pallets/werkzeug/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Provides-Extra: dev +Requires-Dist: pytest ; extra == 'dev' +Requires-Dist: coverage ; extra == 'dev' +Requires-Dist: tox ; 
extra == 'dev' +Requires-Dist: sphinx ; extra == 'dev' +Requires-Dist: pallets-sphinx-themes ; extra == 'dev' +Requires-Dist: sphinx-issues ; extra == 'dev' +Provides-Extra: termcolor +Requires-Dist: termcolor ; extra == 'termcolor' +Provides-Extra: watchdog +Requires-Dist: watchdog ; extra == 'watchdog' + +Werkzeug +======== + +*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") + +Werkzeug is a comprehensive `WSGI`_ web application library. It began as +a simple collection of various utilities for WSGI applications and has +become one of the most advanced WSGI utility libraries. + +It includes: + +- An interactive debugger that allows inspecting stack traces and + source code in the browser with an interactive interpreter for any + frame in the stack. +- A full-featured request object with objects to interact with + headers, query args, form data, files, and cookies. +- A response object that can wrap other WSGI applications and handle + streaming data. +- A routing system for matching URLs to endpoints and generating URLs + for endpoints, with an extensible system for capturing variables + from URLs. +- HTTP utilities to handle entity tags, cache control, dates, user + agents, cookies, files, and more. +- A threaded WSGI server for use while developing applications + locally. +- A test client for simulating HTTP requests during testing without + requiring running a server. + +Werkzeug is Unicode aware and doesn't enforce any dependencies. It is up +to the developer to choose a template engine, database adapter, and even +how to handle requests. It can be used to build all sorts of end user +applications such as blogs, wikis, or bulletin boards. + +`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while +providing more structure and patterns for defining powerful +applications. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U Werkzeug + + +A Simple Example +---------------- + +.. code-block:: python + + from werkzeug.wrappers import Request, Response + + @Request.application + def application(request): + return Response('Hello, World!') + + if __name__ == '__main__': + from werkzeug.serving import run_simple + run_simple('localhost', 4000, application) + + +Links +----- + +- Website: https://palletsprojects.com/p/werkzeug/ +- Documentation: https://werkzeug.palletsprojects.com/ +- Releases: https://pypi.org/project/Werkzeug/ +- Code: https://github.com/pallets/werkzeug +- Issue tracker: https://github.com/pallets/werkzeug/issues +- Test status: https://dev.azure.com/pallets/werkzeug/_build +- Official chat: https://discord.gg/t6rrQZH + +.. _WSGI: https://wsgi.readthedocs.io/en/latest/ +.. _Flask: https://www.palletsprojects.com/p/flask/ +.. 
_pip: https://pip.pypa.io/en/stable/quickstart/ + + diff --git a/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/RECORD b/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/RECORD new file mode 100644 index 0000000..8eedae8 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/RECORD @@ -0,0 +1,119 @@ +werkzeug/__init__.py,sha256=TqqguxCOVD9JRfiTjo_JNHcEmtQsdK1B9FxwDNchE18,6805 +werkzeug/_compat.py,sha256=oBEVVrJT4sqYdIZbUWmgV9T9w257RhTSDBlTjh0Zbb0,6431 +werkzeug/_internal.py,sha256=Wx7cpTRWqeBd0LAqobo0lCO4pNUW4oav6XKf7Taumgk,14590 +werkzeug/_reloader.py,sha256=J0EYq3YrGtC3bNDRqWCHf4yBPuVYoZBy8qp69eHO3YQ,11281 +werkzeug/datastructures.py,sha256=8HoA4Gu9i7ZWi5OBjx244OLWvDEE4JTQQUUTRoAYKog,91761 +werkzeug/exceptions.py,sha256=UzmMCkt5PCn5-TDNI0iGxGz07d3sKHQoArJrvurqVBE,23638 +werkzeug/filesystem.py,sha256=HzKl-j0Hd8Jl66j778UbPTAYNnY6vUZgYLlBZ0e7uw0,2101 +werkzeug/formparser.py,sha256=tN6SO4mn6RUsxRZq4qVBWXbNWNuasn2KaBznTieMaVk,21790 +werkzeug/http.py,sha256=t0ET2tySAf9ZWdEelVWJoLaZzFViYpjoUmiYHPz10-E,43304 +werkzeug/local.py,sha256=USVEcgIg-oCiUJFPIecFIW9jkIejfw4Fjf1u5yN-Np4,14456 +werkzeug/posixemulation.py,sha256=gSSiv1SCmOyzOM_nq1ZaZCtxP__C5MeDJl_4yXJmi4Q,3541 +werkzeug/routing.py,sha256=BSgjrYNwj2j5dAHQtK4INEp2TOf4OJP8hBncYSRO2ps,73410 +werkzeug/security.py,sha256=81149MplFq7-hD4RK4sKp9kzXXejjV9D4lWBzaRyeQ8,8106 +werkzeug/serving.py,sha256=tUFUMg7Bj9iw3nA8ZgC_czMDJJKN7vFskajEmgEFhzE,36597 +werkzeug/test.py,sha256=Cnb5xa3vLDL0hzFCH1fkG_YRpndViGQgCh4D744iSQk,40645 +werkzeug/testapp.py,sha256=hcKBzorVlSHC-uGvGXXjCm3FzCwGWq4yjbTG3Pr7MV8,9301 +werkzeug/urls.py,sha256=8yHdYI99N__-isoTwvGqvuj9QhOh66dd1Xh1DIp0q0g,39261 +werkzeug/useragents.py,sha256=FIonyUF790Ro8OG8cJqG1zixhg5YzXdHmkZbrnK0QRo,5965 +werkzeug/utils.py,sha256=O20Y0qWk5O1IWamC_A5gkmzR5cgBd3yDIHviwBTfNB0,27387 +werkzeug/wsgi.py,sha256=h-zyAeInwE6X6ciSnHI14ImA85adV-F861PmR7UGtRk,36681 +werkzeug/contrib/__init__.py,sha256=EvNyiiCF49j5P0fZYJ3ZGe82ofXdSBvUNqWFwwBMibQ,553 +werkzeug/contrib/atom.py,sha256=KpPJcTfzNW1J0VNQckCbVtVGBe3V8s451tOUya4qByI,15415 +werkzeug/contrib/cache.py,sha256=AEh5UIw-Ui7sHZnlpvrD7ueOKUhCaAD55FXiPtXbbRs,32115 +werkzeug/contrib/fixers.py,sha256=peEtAiIWYT5bh00EWEPOGKzGZXivOzVhhzKPvvzk1RM,9193 +werkzeug/contrib/iterio.py,sha256=KKHa_8aCF_uhoeQVyPGUwrivuB6y6nNdXYo2D2vzOA8,10928 +werkzeug/contrib/lint.py,sha256=NdIxP0E2kVt1xDIxoaIz3Rcl8ZdgmHaFbGTOaybGpN4,296 +werkzeug/contrib/profiler.py,sha256=k_oMLU-AtsVvQ9TxNdermY6FuzSTYr-WE-ZmWb_DMyU,1229 +werkzeug/contrib/securecookie.py,sha256=xbtElskGmtbiApgOJ5WhGgqGDs_68_PcWzqDIAY_QZY,13076 +werkzeug/contrib/sessions.py,sha256=oVXh_7-6_CWOMxDKqcaK05H8RpYoWqAd3al-KzMFPYs,13042 +werkzeug/contrib/wrappers.py,sha256=ZmNk0wpzD66yomPnQxapndZQs4c0kNJaRzqI-BVxeQk,13199 +werkzeug/debug/__init__.py,sha256=Bo3HvgTNY4NQ_2jROTSk3r1ScZcT_g_4EnuHTjKyrKM,18275 +werkzeug/debug/console.py,sha256=HoBL21bbcmtiCLqiLDJLZi1LYnWMZxjoXYH5WaZB1XY,5469 +werkzeug/debug/repr.py,sha256=lIwuhbyrMwVe3P_cFqNyqzHL7P93TLKod7lw9clydEw,9621 +werkzeug/debug/tbtools.py,sha256=SkAAA4KKfwsXJinUbf-AEP4GqONTsR4uU7WPUloXcSE,20318 +werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673 +werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 +werkzeug/debug/shared/debugger.js,sha256=rOhqZMRfpZnnu6_XCGn6wMWPhtfwRAcyZKksdIxPJas,6400 +werkzeug/debug/shared/jquery.js,sha256=CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo,88145 +werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 
+werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 +werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818 +werkzeug/debug/shared/style.css,sha256=gZ9uhmb5zj3XLuT9RvnMp6jMINgQ-VVBCp-2AZbG3YQ,6604 +werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220 +werkzeug/middleware/__init__.py,sha256=f1SFZo67IlW4k1uqKzNHxYQlsakUS-D6KK_j0e3jjwQ,549 +werkzeug/middleware/dispatcher.py,sha256=_-KoMzHtcISHS7ouWKAOraqlCLprdh83YOAn_8DjLp8,2240 +werkzeug/middleware/http_proxy.py,sha256=lRjTdMmghHiZuZrS7_UJ3gZc-vlFizhBbFZ-XZPLwIA,7117 +werkzeug/middleware/lint.py,sha256=ItTwuWJnflF8xMT1uqU_Ty1ryhux-CjeUfskqaUpxsw,12967 +werkzeug/middleware/profiler.py,sha256=8B_s23d6BGrU_q54gJsm6kcCbOJbTSqrXCsioHON0Xs,4471 +werkzeug/middleware/proxy_fix.py,sha256=Y86VcU2oAQ--x0mi4iFVJyEFMzp3Ao8q0zvr_SsrpNw,8506 +werkzeug/middleware/shared_data.py,sha256=WtSphPrsUdpEk4E-_09CAILhfOBJ1YtcX1LrxcQfIzw,8224 +werkzeug/wrappers/__init__.py,sha256=S4VioKAmF_av9Ec9zQvG71X1EOkYfPx1TYck9jyDiyY,1384 +werkzeug/wrappers/accept.py,sha256=TIvjUc0g73fhTWX54wg_D9NNzKvpnG1X8u1w26tK1o8,1760 +werkzeug/wrappers/auth.py,sha256=Pmn6iaGHBrUyHbJpW0lZhO_q9RVoAa5QalaTqcavdAI,1158 +werkzeug/wrappers/base_request.py,sha256=aknREwqVT7WJUxm4weUGdBj90H6rDR3DvsIvmYhaC8A,26943 +werkzeug/wrappers/base_response.py,sha256=ZA1XlxtsbvG4SpbdOEMT5--z7aZM0w6C5y33W8wOXa4,27906 +werkzeug/wrappers/common_descriptors.py,sha256=OJ8jOwMun4L-BxCuFPkK1vaefx_-Y5IndVXvvn_ems4,12089 +werkzeug/wrappers/etag.py,sha256=TwMO1fvluXbBqnFTj2DvrCNa3mYhbHYe1UZAVzfXvuU,12533 +werkzeug/wrappers/json.py,sha256=HvK_A4NpO0sLqgb10sTJcoZydYOwyNiPCJPV7SVgcgE,4343 +werkzeug/wrappers/request.py,sha256=qPo2zmmBv4HxboywtWZb2pJL8OPXo07BUXBKw2j9Fi8,1338 +werkzeug/wrappers/response.py,sha256=vDZFEGzDOG0jjmS0uVVjeT3hqRt1hFaf15npnx7RD28,2329 +werkzeug/wrappers/user_agent.py,sha256=4bTgQKTLQmGUyxOREYOzbeiFP2VwIOE7E14AhUB5NqM,444 +Werkzeug-0.15.5.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +Werkzeug-0.15.5.dist-info/METADATA,sha256=QMmXGaI5L-K5NH-S1q3ATVwWxqU0tQgUCdrJ7_OzqwU,4712 +Werkzeug-0.15.5.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 +Werkzeug-0.15.5.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9 +Werkzeug-0.15.5.dist-info/RECORD,, +Werkzeug-0.15.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +werkzeug/middleware/__pycache__/shared_data.cpython-36.pyc,, +werkzeug/middleware/__pycache__/proxy_fix.cpython-36.pyc,, +werkzeug/middleware/__pycache__/dispatcher.cpython-36.pyc,, +werkzeug/middleware/__pycache__/profiler.cpython-36.pyc,, +werkzeug/middleware/__pycache__/lint.cpython-36.pyc,, +werkzeug/middleware/__pycache__/http_proxy.cpython-36.pyc,, +werkzeug/middleware/__pycache__/__init__.cpython-36.pyc,, +werkzeug/debug/__pycache__/tbtools.cpython-36.pyc,, +werkzeug/debug/__pycache__/__init__.cpython-36.pyc,, +werkzeug/debug/__pycache__/repr.cpython-36.pyc,, +werkzeug/debug/__pycache__/console.cpython-36.pyc,, +werkzeug/contrib/__pycache__/securecookie.cpython-36.pyc,, +werkzeug/contrib/__pycache__/cache.cpython-36.pyc,, +werkzeug/contrib/__pycache__/wrappers.cpython-36.pyc,, +werkzeug/contrib/__pycache__/fixers.cpython-36.pyc,, +werkzeug/contrib/__pycache__/profiler.cpython-36.pyc,, +werkzeug/contrib/__pycache__/lint.cpython-36.pyc,, +werkzeug/contrib/__pycache__/atom.cpython-36.pyc,, +werkzeug/contrib/__pycache__/sessions.cpython-36.pyc,, 
+werkzeug/contrib/__pycache__/__init__.cpython-36.pyc,, +werkzeug/contrib/__pycache__/iterio.cpython-36.pyc,, +werkzeug/wrappers/__pycache__/response.cpython-36.pyc,, +werkzeug/wrappers/__pycache__/base_request.cpython-36.pyc,, +werkzeug/wrappers/__pycache__/request.cpython-36.pyc,, +werkzeug/wrappers/__pycache__/user_agent.cpython-36.pyc,, +werkzeug/wrappers/__pycache__/auth.cpython-36.pyc,, +werkzeug/wrappers/__pycache__/accept.cpython-36.pyc,, +werkzeug/wrappers/__pycache__/etag.cpython-36.pyc,, +werkzeug/wrappers/__pycache__/json.cpython-36.pyc,, +werkzeug/wrappers/__pycache__/__init__.cpython-36.pyc,, +werkzeug/wrappers/__pycache__/common_descriptors.cpython-36.pyc,, +werkzeug/wrappers/__pycache__/base_response.cpython-36.pyc,, +werkzeug/__pycache__/formparser.cpython-36.pyc,, +werkzeug/__pycache__/_compat.cpython-36.pyc,, +werkzeug/__pycache__/useragents.cpython-36.pyc,, +werkzeug/__pycache__/local.cpython-36.pyc,, +werkzeug/__pycache__/posixemulation.cpython-36.pyc,, +werkzeug/__pycache__/routing.cpython-36.pyc,, +werkzeug/__pycache__/urls.cpython-36.pyc,, +werkzeug/__pycache__/serving.cpython-36.pyc,, +werkzeug/__pycache__/filesystem.cpython-36.pyc,, +werkzeug/__pycache__/_internal.cpython-36.pyc,, +werkzeug/__pycache__/testapp.cpython-36.pyc,, +werkzeug/__pycache__/test.cpython-36.pyc,, +werkzeug/__pycache__/wsgi.cpython-36.pyc,, +werkzeug/__pycache__/__init__.cpython-36.pyc,, +werkzeug/__pycache__/security.cpython-36.pyc,, +werkzeug/__pycache__/http.cpython-36.pyc,, +werkzeug/__pycache__/utils.cpython-36.pyc,, +werkzeug/__pycache__/_reloader.cpython-36.pyc,, +werkzeug/__pycache__/datastructures.cpython-36.pyc,, +werkzeug/__pycache__/exceptions.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/WHEEL b/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/WHEEL new file mode 100644 index 0000000..78e6f69 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.4) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/top_level.txt new file mode 100644 index 0000000..6fe8da8 --- /dev/null +++ b/py3/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/top_level.txt @@ -0,0 +1 @@ +werkzeug diff --git a/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/DESCRIPTION.rst b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..5be2b5a --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,138 @@ +aiofiles: file support for asyncio +================================== + +.. image:: https://img.shields.io/pypi/v/aiofiles.svg + :target: https://pypi.python.org/pypi/aiofiles + +.. image:: https://travis-ci.org/Tinche/aiofiles.svg?branch=master + :target: https://travis-ci.org/Tinche/aiofiles + +.. image:: https://codecov.io/gh/Tinche/aiofiles/branch/master/graph/badge.svg + :target: https://codecov.io/gh/Tinche/aiofiles + +**aiofiles** is an Apache2 licensed library, written in Python, for handling local +disk files in asyncio applications. + +Ordinary local file IO is blocking, and cannot easily and portably made +asynchronous. This means doing file IO may interfere with asyncio applications, +which shouldn't block the executing thread. 
aiofiles helps with this by +introducing asynchronous versions of files that support delegating operations to +a separate thread pool. + +.. code-block:: python + + async with aiofiles.open('filename', mode='r') as f: + contents = await f.read() + print(contents) + 'My file contents' + +Asynchronous iteration is also supported. + +.. code-block:: python + + async with aiofiles.open('filename') as f: + async for line in f: + ... + +Features +-------- + +- a file API very similar to Python's standard, blocking API +- support for buffered and unbuffered binary files, and buffered text files +- support for ``async``/``await`` (:PEP:`492`) constructs + + +Installation +------------ + +To install aiofiles, simply: + +.. code-block:: bash + + $ pip install aiofiles + +Usage +----- + +Files are opened using the ``aiofiles.open()`` coroutine, which in addition to +mirroring the builtin ``open`` accepts optional ``loop`` and ``executor`` +arguments. If ``loop`` is absent, the default loop will be used, as per the +set asyncio policy. If ``executor`` is not specified, the default event loop +executor will be used. + +In case of success, an asynchronous file object is returned with an +API identical to an ordinary file, except the following methods are coroutines +and delegate to an executor: + +* ``close`` +* ``flush`` +* ``isatty`` +* ``read`` +* ``readall`` +* ``read1`` +* ``readinto`` +* ``readline`` +* ``readlines`` +* ``seek`` +* ``seekable`` +* ``tell`` +* ``truncate`` +* ``writable`` +* ``write`` +* ``writelines`` + +In case of failure, one of the usual exceptions will be raised. + +The ``aiofiles.os`` module contains executor-enabled coroutine versions of +several useful ``os`` functions that deal with files: + +* ``stat`` +* ``sendfile`` + +Writing tests for aiofiles +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Real file IO can be mocked by patching ``aiofiles.threadpool.sync_open`` +as desired. The return type also needs to be registered with the +``aiofiles.threadpool.wrap`` dispatcher: + +.. code-block:: python + + aiofiles.threadpool.wrap.register(mock.MagicMock)( + lambda *args, **kwargs: threadpool.AsyncBufferedIOBase(*args, **kwargs)) + + async def test_stuff(): + data = 'data' + mock_file = mock.MagicMock() + + with mock.patch('aiofiles.threadpool.sync_open', return_value=mock_file) as mock_open: + async with aiofiles.open('filename', 'w') as f: + await f.write(data) + + mock_file.write.assert_called_once_with(data) + +History +~~~~~~~ + +0.4.0 (2018-08-11) +`````````````````` +- Python 3.7 support. +- Removed Python 3.3/3.4 support. If you use these versions, stick to aiofiles 0.3.x. + +0.3.2 (2017-09-23) +`````````````````` +- The LICENSE is now included in the sdist. + `#31 `_ + +0.3.1 (2017-03-10) +`````````````````` + +- Introduced a changelog. +- ``aiofiles.os.sendfile`` will now work if the standard ``os`` module contains a ``sendfile`` function. + +Contributing +~~~~~~~~~~~~ +Contributions are very welcome. Tests can be run with ``tox``, please ensure +the coverage at least stays the same before you submit a pull request. 
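The Usage section above describes the optional ``loop`` and ``executor`` arguments to ``aiofiles.open()`` but does not show them in use. A minimal sketch, assuming aiofiles 0.4.0 as packaged in this tree; the file name and the dedicated two-worker pool are hypothetical illustration only:

.. code-block:: python

    import asyncio
    from concurrent.futures import ThreadPoolExecutor

    import aiofiles

    async def main():
        # Route this file's IO through a dedicated pool instead of the
        # event loop's default executor.
        pool = ThreadPoolExecutor(max_workers=2)
        async with aiofiles.open('notes.txt', mode='w', executor=pool) as f:
            await f.write('hello from a worker thread\n')

    asyncio.get_event_loop().run_until_complete(main())

Passing an explicit executor keeps heavy file IO from competing with other work delegated to the loop's default executor.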
+ + diff --git a/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/METADATA b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/METADATA new file mode 100644 index 0000000..2e1c2f6 --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/METADATA @@ -0,0 +1,155 @@ +Metadata-Version: 2.0 +Name: aiofiles +Version: 0.4.0 +Summary: File support for asyncio. +Home-page: https://github.com/Tinche/aiofiles +Author: Tin Tvrtkovic +Author-email: tinchester@gmail.com +License: Apache 2.0 +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Topic :: System :: Filesystems + +aiofiles: file support for asyncio +================================== + +.. image:: https://img.shields.io/pypi/v/aiofiles.svg + :target: https://pypi.python.org/pypi/aiofiles + +.. image:: https://travis-ci.org/Tinche/aiofiles.svg?branch=master + :target: https://travis-ci.org/Tinche/aiofiles + +.. image:: https://codecov.io/gh/Tinche/aiofiles/branch/master/graph/badge.svg + :target: https://codecov.io/gh/Tinche/aiofiles + +**aiofiles** is an Apache2 licensed library, written in Python, for handling local +disk files in asyncio applications. + +Ordinary local file IO is blocking, and cannot easily and portably made +asynchronous. This means doing file IO may interfere with asyncio applications, +which shouldn't block the executing thread. aiofiles helps with this by +introducing asynchronous versions of files that support delegating operations to +a separate thread pool. + +.. code-block:: python + + async with aiofiles.open('filename', mode='r') as f: + contents = await f.read() + print(contents) + 'My file contents' + +Asynchronous iteration is also supported. + +.. code-block:: python + + async with aiofiles.open('filename') as f: + async for line in f: + ... + +Features +-------- + +- a file API very similar to Python's standard, blocking API +- support for buffered and unbuffered binary files, and buffered text files +- support for ``async``/``await`` (:PEP:`492`) constructs + + +Installation +------------ + +To install aiofiles, simply: + +.. code-block:: bash + + $ pip install aiofiles + +Usage +----- + +Files are opened using the ``aiofiles.open()`` coroutine, which in addition to +mirroring the builtin ``open`` accepts optional ``loop`` and ``executor`` +arguments. If ``loop`` is absent, the default loop will be used, as per the +set asyncio policy. If ``executor`` is not specified, the default event loop +executor will be used. + +In case of success, an asynchronous file object is returned with an +API identical to an ordinary file, except the following methods are coroutines +and delegate to an executor: + +* ``close`` +* ``flush`` +* ``isatty`` +* ``read`` +* ``readall`` +* ``read1`` +* ``readinto`` +* ``readline`` +* ``readlines`` +* ``seek`` +* ``seekable`` +* ``tell`` +* ``truncate`` +* ``writable`` +* ``write`` +* ``writelines`` + +In case of failure, one of the usual exceptions will be raised. 
+ +The ``aiofiles.os`` module contains executor-enabled coroutine versions of +several useful ``os`` functions that deal with files: + +* ``stat`` +* ``sendfile`` + +Writing tests for aiofiles +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Real file IO can be mocked by patching ``aiofiles.threadpool.sync_open`` +as desired. The return type also needs to be registered with the +``aiofiles.threadpool.wrap`` dispatcher: + +.. code-block:: python + + aiofiles.threadpool.wrap.register(mock.MagicMock)( + lambda *args, **kwargs: threadpool.AsyncBufferedIOBase(*args, **kwargs)) + + async def test_stuff(): + data = 'data' + mock_file = mock.MagicMock() + + with mock.patch('aiofiles.threadpool.sync_open', return_value=mock_file) as mock_open: + async with aiofiles.open('filename', 'w') as f: + await f.write(data) + + mock_file.write.assert_called_once_with(data) + +History +~~~~~~~ + +0.4.0 (2018-08-11) +`````````````````` +- Python 3.7 support. +- Removed Python 3.3/3.4 support. If you use these versions, stick to aiofiles 0.3.x. + +0.3.2 (2017-09-23) +`````````````````` +- The LICENSE is now included in the sdist. + `#31 `_ + +0.3.1 (2017-03-10) +`````````````````` + +- Introduced a changelog. +- ``aiofiles.os.sendfile`` will now work if the standard ``os`` module contains a ``sendfile`` function. + +Contributing +~~~~~~~~~~~~ +Contributions are very welcome. Tests can be run with ``tox``, please ensure +the coverage at least stays the same before you submit a pull request. + + diff --git a/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/RECORD b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/RECORD new file mode 100644 index 0000000..9a16836 --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/RECORD @@ -0,0 +1,23 @@ +aiofiles/__init__.py,sha256=EqOSYq0u50pdIbUzDXqyeImxnHUmT9AtbRi_PtO5Hyw,123 +aiofiles/_compat.py,sha256=dk34urK9pKm1iqKNp2jKDWatAbn3Tt18eRn31G9BcAI,205 +aiofiles/base.py,sha256=OFBt0XoRATidR8UeVldUZmul6-jgRNTn6qebAgFoMys,2121 +aiofiles/os.py,sha256=zNYhMoGytKY994XPCqetbC4ScLgmZjJCjQfZNUm2NrU,514 +aiofiles/threadpool/__init__.py,sha256=io-WG10ohdQU6CdPZEV24oBssL_9T6NEf6AEAHA18Ao,2144 +aiofiles/threadpool/binary.py,sha256=Ds7je-noWGqtwBOd21OF7DNV1m9VFt3Z29ynS7hGf-k,1102 +aiofiles/threadpool/text.py,sha256=blq1hfMSQ_kEtDMuRwf-CtHNeOAwdgIOYYVDtFJi8CI,629 +aiofiles/threadpool/utils.py,sha256=8apQJirPwOgUeRUbT2ghDpqmiXaUq_ZJ5eQRklHgz3U,1266 +aiofiles-0.4.0.dist-info/DESCRIPTION.rst,sha256=H1Vj_rqfRMCdJcU1lW_sPSIWr185N97Kn594gYK_B1A,3873 +aiofiles-0.4.0.dist-info/METADATA,sha256=RqoqpyBnW1j0u1t3w9jQ_F-0vZWsepXvs0MA-GFoMsM,4445 +aiofiles-0.4.0.dist-info/RECORD,, +aiofiles-0.4.0.dist-info/WHEEL,sha256=8Lm45v9gcYRm70DrgFGVe4WsUtUMi1_0Tso1hqPGMjA,92 +aiofiles-0.4.0.dist-info/metadata.json,sha256=0eE-SIzvVCI2-nBAir1RKYLcPpxWa1uzpmAyD9Vye9M,712 +aiofiles-0.4.0.dist-info/top_level.txt,sha256=sskrEAT1Ocyj9qsJIoeIQNAijBFwY2L0nqayXghOSI0,9 +aiofiles-0.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +aiofiles/__pycache__/_compat.cpython-36.pyc,, +aiofiles/__pycache__/base.cpython-36.pyc,, +aiofiles/__pycache__/os.cpython-36.pyc,, +aiofiles/__pycache__/__init__.cpython-36.pyc,, +aiofiles/threadpool/__pycache__/text.cpython-36.pyc,, +aiofiles/threadpool/__pycache__/binary.cpython-36.pyc,, +aiofiles/threadpool/__pycache__/__init__.cpython-36.pyc,, +aiofiles/threadpool/__pycache__/utils.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/WHEEL b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/WHEEL new 
file mode 100644 index 0000000..6261a26 --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/metadata.json b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/metadata.json new file mode 100644 index 0000000..8309a32 --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Topic :: System :: Filesystems"], "extensions": {"python.details": {"contacts": [{"email": "tinchester@gmail.com", "name": "Tin Tvrtkovic", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/Tinche/aiofiles"}}}, "generator": "bdist_wheel (0.30.0)", "license": "Apache 2.0", "metadata_version": "2.0", "name": "aiofiles", "summary": "File support for asyncio.", "version": "0.4.0"} \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/top_level.txt new file mode 100644 index 0000000..1ce9bba --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles-0.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +aiofiles diff --git a/py3/lib/python3.6/site-packages/aiofiles/__init__.py b/py3/lib/python3.6/site-packages/aiofiles/__init__.py new file mode 100644 index 0000000..3c4d362 --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles/__init__.py @@ -0,0 +1,6 @@ +"""Utilities for asyncio-friendly file handling.""" +from .threadpool import open + +__version__ = "0.4.0" + +__all__ = (open,) diff --git a/py3/lib/python3.6/site-packages/aiofiles/_compat.py b/py3/lib/python3.6/site-packages/aiofiles/_compat.py new file mode 100644 index 0000000..2921cc5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles/_compat.py @@ -0,0 +1,8 @@ +import sys + +try: + from functools import singledispatch +except ImportError: # pragma: nocover + from singledispatch import singledispatch + +PY_35 = sys.version_info >= (3, 5) diff --git a/py3/lib/python3.6/site-packages/aiofiles/base.py b/py3/lib/python3.6/site-packages/aiofiles/base.py new file mode 100644 index 0000000..72a835e --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles/base.py @@ -0,0 +1,93 @@ +"""Various base classes.""" +import asyncio +from collections.abc import Coroutine + + +class AsyncBase: + def __init__(self, file, loop, executor): + self._file = file + self._loop = loop + self._executor = executor + + def __aiter__(self): + """We are our own iterator.""" + return self + + @asyncio.coroutine + def __anext__(self): + """Simulate normal file iteration.""" + line = yield from self.readline() + if line: + return line + else: + raise StopAsyncIteration + + +class _ContextManager(Coroutine): + __slots__ = ('_coro', '_obj') + + def __init__(self, coro): + self._coro = coro + self._obj = None + + def send(self, value): + return self._coro.send(value) + + def throw(self, typ, val=None, tb=None): + if val is None: + return self._coro.throw(typ) + elif tb is None: + return self._coro.throw(typ, val) + else: + return self._coro.throw(typ, val, tb) + + def close(self): + return 
self._coro.close() + + @property + def gi_frame(self): + return self._coro.gi_frame + + @property + def gi_running(self): + return self._coro.gi_running + + @property + def gi_code(self): + return self._coro.gi_code + + def __next__(self): + return self.send(None) + + @asyncio.coroutine + def __iter__(self): + resp = yield from self._coro + return resp + + def __await__(self): + resp = yield from self._coro + return resp + + @asyncio.coroutine + def __anext__(self): + resp = yield from self._coro + return resp + + @asyncio.coroutine + def __aenter__(self): + self._obj = yield from self._coro + return self._obj + + @asyncio.coroutine + def __aexit__(self, exc_type, exc, tb): + self._obj.close() + self._obj = None + + +class AiofilesContextManager(_ContextManager): + """An adjusted async context manager for aiofiles.""" + + @asyncio.coroutine + def __aexit__(self, exc_type, exc_val, exc_tb): + yield from self._obj.close() + self._obj = None diff --git a/py3/lib/python3.6/site-packages/aiofiles/os.py b/py3/lib/python3.6/site-packages/aiofiles/os.py new file mode 100644 index 0000000..9b5c3ea --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles/os.py @@ -0,0 +1,22 @@ +"""Async executor versions of file functions from the os module.""" +import asyncio +from functools import partial, wraps +import os + + +def wrap(func): + @asyncio.coroutine + @wraps(func) + def run(*args, loop=None, executor=None, **kwargs): + if loop is None: + loop = asyncio.get_event_loop() + pfunc = partial(func, *args, **kwargs) + return loop.run_in_executor(executor, pfunc) + + return run + + +stat = wrap(os.stat) + +if hasattr(os, "sendfile"): + sendfile = wrap(os.sendfile) diff --git a/py3/lib/python3.6/site-packages/aiofiles/threadpool/__init__.py b/py3/lib/python3.6/site-packages/aiofiles/threadpool/__init__.py new file mode 100644 index 0000000..a8ea978 --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles/threadpool/__init__.py @@ -0,0 +1,63 @@ +"""Handle files using a thread pool executor.""" +import asyncio + +from io import (FileIO, TextIOBase, BufferedReader, BufferedWriter, + BufferedRandom) +from functools import partial, singledispatch + +from .binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO +from .text import AsyncTextIOWrapper +from ..base import AiofilesContextManager + +sync_open = open + +__all__ = ('open', ) + + +def open(file, mode='r', buffering=-1, encoding=None, errors=None, newline=None, + closefd=True, opener=None, *, loop=None, executor=None): + return AiofilesContextManager(_open(file, mode=mode, buffering=buffering, + encoding=encoding, errors=errors, + newline=newline, closefd=closefd, + opener=opener, loop=loop, + executor=executor)) + + +@asyncio.coroutine +def _open(file, mode='r', buffering=-1, encoding=None, errors=None, newline=None, + closefd=True, opener=None, *, loop=None, executor=None): + """Open an asyncio file.""" + if loop is None: + loop = asyncio.get_event_loop() + cb = partial(sync_open, file, mode=mode, buffering=buffering, + encoding=encoding, errors=errors, newline=newline, + closefd=closefd, opener=opener) + f = yield from loop.run_in_executor(executor, cb) + + return wrap(f, loop=loop, executor=executor) + + +@singledispatch +def wrap(file, *, loop=None, executor=None): + raise TypeError('Unsupported io type: {}.'.format(file)) + + +@wrap.register(TextIOBase) +def _(file, *, loop=None, executor=None): + return AsyncTextIOWrapper(file, loop=loop, executor=executor) + + +@wrap.register(BufferedWriter) +def _(file, *, loop=None, 
executor=None): + return AsyncBufferedIOBase(file, loop=loop, executor=executor) + + +@wrap.register(BufferedReader) +@wrap.register(BufferedRandom) +def _(file, *, loop=None, executor=None): + return AsyncBufferedReader(file, loop=loop, executor=executor) + + +@wrap.register(FileIO) +def _(file, *, loop=None, executor=None): + return AsyncFileIO(file, loop, executor) diff --git a/py3/lib/python3.6/site-packages/aiofiles/threadpool/binary.py b/py3/lib/python3.6/site-packages/aiofiles/threadpool/binary.py new file mode 100644 index 0000000..288b54b --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles/threadpool/binary.py @@ -0,0 +1,26 @@ +from ..base import AsyncBase +from .utils import (delegate_to_executor, proxy_property_directly, + proxy_method_directly) + + +@delegate_to_executor('close', 'flush', 'isatty', 'read', 'read1', 'readinto', + 'readline', 'readlines', 'seek', 'seekable', 'tell', + 'truncate', 'writable', 'write', 'writelines') +@proxy_method_directly('detach', 'fileno', 'readable') +@proxy_property_directly('closed', 'raw') +class AsyncBufferedIOBase(AsyncBase): + """The asyncio executor version of io.BufferedWriter.""" + + +@delegate_to_executor('peek') +class AsyncBufferedReader(AsyncBufferedIOBase): + """The asyncio executor version of io.BufferedReader and Random.""" + + +@delegate_to_executor('close', 'flush', 'isatty', 'read', 'readall', 'readinto', + 'readline', 'readlines', 'seek', 'seekable', 'tell', + 'truncate', 'writable', 'write', 'writelines') +@proxy_method_directly('fileno', 'readable') +@proxy_property_directly('closed') +class AsyncFileIO(AsyncBase): + """The asyncio executor version of io.FileIO.""" diff --git a/py3/lib/python3.6/site-packages/aiofiles/threadpool/text.py b/py3/lib/python3.6/site-packages/aiofiles/threadpool/text.py new file mode 100644 index 0000000..7668c0a --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles/threadpool/text.py @@ -0,0 +1,13 @@ +from .utils import (delegate_to_executor, proxy_property_directly, + proxy_method_directly) +from ..base import AsyncBase + + +@delegate_to_executor('close', 'flush', 'isatty', 'read', 'readable', + 'readline', 'readlines', 'seek', 'seekable', 'tell', + 'truncate', 'write', 'writable', 'writelines') +@proxy_method_directly('detach', 'fileno', 'readable') +@proxy_property_directly('buffer', 'closed', 'encoding', 'errors', + 'line_buffering', 'newlines') +class AsyncTextIOWrapper(AsyncBase): + """The asyncio executor version of io.TextIOWrapper.""" diff --git a/py3/lib/python3.6/site-packages/aiofiles/threadpool/utils.py b/py3/lib/python3.6/site-packages/aiofiles/threadpool/utils.py new file mode 100644 index 0000000..368b44b --- /dev/null +++ b/py3/lib/python3.6/site-packages/aiofiles/threadpool/utils.py @@ -0,0 +1,49 @@ +import asyncio +import functools + + +def delegate_to_executor(*attrs): + def cls_builder(cls): + for attr_name in attrs: + setattr(cls, attr_name, _make_delegate_method(attr_name)) + return cls + return cls_builder + + +def proxy_method_directly(*attrs): + def cls_builder(cls): + for attr_name in attrs: + setattr(cls, attr_name, _make_proxy_method(attr_name)) + return cls + + return cls_builder + + +def proxy_property_directly(*attrs): + def cls_builder(cls): + for attr_name in attrs: + setattr(cls, attr_name, _make_proxy_property(attr_name)) + return cls + + return cls_builder + + +def _make_delegate_method(attr_name): + @asyncio.coroutine + def method(self, *args, **kwargs): + cb = functools.partial(getattr(self._file, attr_name), + *args, **kwargs) + return 
(yield from self._loop.run_in_executor(self._executor, cb)) + return method + + +def _make_proxy_method(attr_name): + def method(self, *args, **kwargs): + return getattr(self._file, attr_name)(*args, **kwargs) + return method + + +def _make_proxy_property(attr_name): + def proxy_property(self): + return getattr(self._file, attr_name) + return property(proxy_property) diff --git a/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/PKG-INFO b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/PKG-INFO new file mode 100644 index 0000000..7bf86b5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/PKG-INFO @@ -0,0 +1,105 @@ +Metadata-Version: 1.2 +Name: alembic +Version: 1.0.11 +Summary: A database migration tool for SQLAlchemy. +Home-page: https://alembic.sqlalchemy.org +Author: Mike Bayer +Author-email: mike@zzzcomputing.com +License: MIT +Project-URL: Issue Tracker, https://github.com/sqlalchemy/alembic/ +Description: Alembic is a database migrations tool written by the author + of `SQLAlchemy `_. A migrations tool + offers the following functionality: + + * Can emit ALTER statements to a database in order to change + the structure of tables and other constructs + * Provides a system whereby "migration scripts" may be constructed; + each script indicates a particular series of steps that can "upgrade" a + target database to a new version, and optionally a series of steps that can + "downgrade" similarly, doing the same steps in reverse. + * Allows the scripts to execute in some sequential manner. + + The goals of Alembic are: + + * Very open ended and transparent configuration and operation. A new + Alembic environment is generated from a set of templates which is selected + among a set of options when setup first occurs. The templates then deposit a + series of scripts that define fully how database connectivity is established + and how migration scripts are invoked; the migration scripts themselves are + generated from a template within that series of scripts. The scripts can + then be further customized to define exactly how databases will be + interacted with and what structure new migration files should take. + * Full support for transactional DDL. The default scripts ensure that all + migrations occur within a transaction - for those databases which support + this (Postgresql, Microsoft SQL Server), migrations can be tested with no + need to manually undo changes upon failure. + * Minimalist script construction. Basic operations like renaming + tables/columns, adding/removing columns, changing column attributes can be + performed through one line commands like alter_column(), rename_table(), + add_constraint(). There is no need to recreate full SQLAlchemy Table + structures for simple operations like these - the functions themselves + generate minimalist schema structures behind the scenes to achieve the given + DDL sequence. + * "auto generation" of migrations. While real world migrations are far more + complex than what can be automatically determined, Alembic can still + eliminate the initial grunt work in generating new migration directives + from an altered schema. The ``--autogenerate`` feature will inspect the + current status of a database using SQLAlchemy's schema inspection + capabilities, compare it to the current state of the database model as + specified in Python, and generate a series of "candidate" migrations, + rendering them into a new migration script as Python directives. 
The + developer then edits the new file, adding additional directives and data + migrations as needed, to produce a finished migration. Table and column + level changes can be detected, with constraints and indexes to follow as + well. + * Full support for migrations generated as SQL scripts. Those of us who + work in corporate environments know that direct access to DDL commands on a + production database is a rare privilege, and DBAs want textual SQL scripts. + Alembic's usage model and commands are oriented towards being able to run a + series of migrations into a textual output file as easily as it runs them + directly to a database. Care must be taken in this mode to not invoke other + operations that rely upon in-memory SELECTs of rows - Alembic tries to + provide helper constructs like bulk_insert() to help with data-oriented + operations that are compatible with script-based DDL. + * Non-linear, dependency-graph versioning. Scripts are given UUID + identifiers similarly to a DVCS, and the linkage of one script to the next + is achieved via human-editable markers within the scripts themselves. + The structure of a set of migration files is considered as a + directed-acyclic graph, meaning any migration file can be dependent + on any other arbitrary set of migration files, or none at + all. Through this open-ended system, migration files can be organized + into branches, multiple roots, and mergepoints, without restriction. + Commands are provided to produce new branches, roots, and merges of + branches automatically. + * Provide a library of ALTER constructs that can be used by any SQLAlchemy + application. The DDL constructs build upon SQLAlchemy's own DDLElement base + and can be used standalone by any application or script. + * At long last, bring SQLite and its inablity to ALTER things into the fold, + but in such a way that SQLite's very special workflow needs are accommodated + in an explicit way that makes the most of a bad situation, through the + concept of a "batch" migration, where multiple changes to a table can + be batched together to form a series of instructions for a single, subsequent + "move-and-copy" workflow. You can even use "move-and-copy" workflow for + other databases, if you want to recreate a table in the background + on a busy system. 
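
To make the batch ("move-and-copy") workflow described above concrete, a revision file using it looks roughly like the following sketch; the revision identifiers, table name, and column are illustrative assumptions and are not taken from this repository.

    """Hypothetical revision illustrating the batch ("move-and-copy") workflow."""
    from alembic import op
    import sqlalchemy as sa

    # revision identifiers used by Alembic; these values are placeholders
    revision = "1975ea83b712"
    down_revision = "ae1027a6acf"
    branch_labels = None
    depends_on = None


    def upgrade():
        # On SQLite this recreates the table in the background and copies rows
        # across; on backends with full ALTER support it emits plain ALTERs.
        with op.batch_alter_table("account") as batch_op:
            batch_op.add_column(sa.Column("last_seen", sa.DateTime(), nullable=True))


    def downgrade():
        with op.batch_alter_table("account") as batch_op:
            batch_op.drop_column("last_seen")

When the autogenerate feature described above is configured with render_as_batch=True, it emits its directives in this batch form automatically.
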
+ + Documentation and status of Alembic is at https://alembic.sqlalchemy.org/ + + +Keywords: SQLAlchemy migrations +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* diff --git a/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/SOURCES.txt b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/SOURCES.txt new file mode 100644 index 0000000..22d8f04 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/SOURCES.txt @@ -0,0 +1,205 @@ +CHANGES +LICENSE +MANIFEST.in +README.rst +README.unittests.rst +run_tests.py +setup.cfg +setup.py +tox.ini +alembic/__init__.py +alembic/command.py +alembic/config.py +alembic/context.py +alembic/op.py +alembic.egg-info/PKG-INFO +alembic.egg-info/SOURCES.txt +alembic.egg-info/dependency_links.txt +alembic.egg-info/entry_points.txt +alembic.egg-info/not-zip-safe +alembic.egg-info/requires.txt +alembic.egg-info/top_level.txt +alembic/autogenerate/__init__.py +alembic/autogenerate/api.py +alembic/autogenerate/compare.py +alembic/autogenerate/render.py +alembic/autogenerate/rewriter.py +alembic/ddl/__init__.py +alembic/ddl/base.py +alembic/ddl/impl.py +alembic/ddl/mssql.py +alembic/ddl/mysql.py +alembic/ddl/oracle.py +alembic/ddl/postgresql.py +alembic/ddl/sqlite.py +alembic/operations/__init__.py +alembic/operations/base.py +alembic/operations/batch.py +alembic/operations/ops.py +alembic/operations/schemaobj.py +alembic/operations/toimpl.py +alembic/runtime/__init__.py +alembic/runtime/environment.py +alembic/runtime/migration.py +alembic/script/__init__.py +alembic/script/base.py +alembic/script/revision.py +alembic/templates/generic/README +alembic/templates/generic/alembic.ini.mako +alembic/templates/generic/env.py +alembic/templates/generic/script.py.mako +alembic/templates/multidb/README +alembic/templates/multidb/alembic.ini.mako +alembic/templates/multidb/env.py +alembic/templates/multidb/script.py.mako +alembic/templates/pylons/README +alembic/templates/pylons/alembic.ini.mako +alembic/templates/pylons/env.py +alembic/templates/pylons/script.py.mako +alembic/testing/__init__.py +alembic/testing/assertions.py +alembic/testing/compat.py +alembic/testing/config.py +alembic/testing/engines.py +alembic/testing/env.py +alembic/testing/exclusions.py +alembic/testing/fixtures.py +alembic/testing/mock.py +alembic/testing/provision.py +alembic/testing/requirements.py +alembic/testing/runner.py +alembic/testing/util.py +alembic/testing/warnings.py +alembic/testing/plugin/__init__.py +alembic/testing/plugin/bootstrap.py +alembic/testing/plugin/noseplugin.py +alembic/testing/plugin/plugin_base.py +alembic/testing/plugin/pytestplugin.py +alembic/util/__init__.py +alembic/util/compat.py +alembic/util/exc.py +alembic/util/langhelpers.py +alembic/util/messaging.py +alembic/util/pyfiles.py 
+alembic/util/sqla_compat.py +docs/autogenerate.html +docs/batch.html +docs/branches.html +docs/changelog.html +docs/cookbook.html +docs/front.html +docs/genindex.html +docs/index.html +docs/naming.html +docs/offline.html +docs/ops.html +docs/py-modindex.html +docs/search.html +docs/searchindex.js +docs/tutorial.html +docs/_images/api_overview.png +docs/_sources/autogenerate.rst.txt +docs/_sources/batch.rst.txt +docs/_sources/branches.rst.txt +docs/_sources/changelog.rst.txt +docs/_sources/cookbook.rst.txt +docs/_sources/front.rst.txt +docs/_sources/index.rst.txt +docs/_sources/naming.rst.txt +docs/_sources/offline.rst.txt +docs/_sources/ops.rst.txt +docs/_sources/tutorial.rst.txt +docs/_sources/api/autogenerate.rst.txt +docs/_sources/api/commands.rst.txt +docs/_sources/api/config.rst.txt +docs/_sources/api/ddl.rst.txt +docs/_sources/api/index.rst.txt +docs/_sources/api/operations.rst.txt +docs/_sources/api/overview.rst.txt +docs/_sources/api/runtime.rst.txt +docs/_sources/api/script.rst.txt +docs/_static/basic.css +docs/_static/changelog.css +docs/_static/doctools.js +docs/_static/documentation_options.js +docs/_static/file.png +docs/_static/jquery-3.2.1.js +docs/_static/jquery.js +docs/_static/language_data.js +docs/_static/minus.png +docs/_static/nature.css +docs/_static/nature_override.css +docs/_static/plus.png +docs/_static/pygments.css +docs/_static/searchtools.js +docs/_static/site_custom_css.css +docs/_static/sphinx_paramlinks.css +docs/_static/underscore-1.3.1.js +docs/_static/underscore.js +docs/api/autogenerate.html +docs/api/commands.html +docs/api/config.html +docs/api/ddl.html +docs/api/index.html +docs/api/operations.html +docs/api/overview.html +docs/api/runtime.html +docs/api/script.html +docs/build/Makefile +docs/build/autogenerate.rst +docs/build/batch.rst +docs/build/branches.rst +docs/build/changelog.rst +docs/build/conf.py +docs/build/cookbook.rst +docs/build/front.rst +docs/build/index.rst +docs/build/naming.rst +docs/build/offline.rst +docs/build/ops.rst +docs/build/requirements.txt +docs/build/tutorial.rst +docs/build/_static/nature_override.css +docs/build/_static/site_custom_css.css +docs/build/_templates/site_custom_sidebars.html +docs/build/api/api_overview.png +docs/build/api/autogenerate.rst +docs/build/api/commands.rst +docs/build/api/config.rst +docs/build/api/ddl.rst +docs/build/api/index.rst +docs/build/api/operations.rst +docs/build/api/overview.rst +docs/build/api/runtime.rst +docs/build/api/script.rst +docs/build/unreleased/README.txt +tests/__init__.py +tests/_autogen_fixtures.py +tests/_large_map.py +tests/conftest.py +tests/requirements.py +tests/test_autogen_comments.py +tests/test_autogen_composition.py +tests/test_autogen_diffs.py +tests/test_autogen_fks.py +tests/test_autogen_indexes.py +tests/test_autogen_render.py +tests/test_batch.py +tests/test_bulk_insert.py +tests/test_command.py +tests/test_config.py +tests/test_environment.py +tests/test_external_dialect.py +tests/test_mssql.py +tests/test_mysql.py +tests/test_offline_environment.py +tests/test_op.py +tests/test_op_naming_convention.py +tests/test_oracle.py +tests/test_postgresql.py +tests/test_revision.py +tests/test_script_consumption.py +tests/test_script_production.py +tests/test_sqlite.py +tests/test_version_table.py +tests/test_version_traversal.py \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/dependency_links.txt b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/dependency_links.txt new file mode 100644 index 
0000000..8b13789 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/entry_points.txt b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/entry_points.txt new file mode 100644 index 0000000..27ac374 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +alembic = alembic.config:main + diff --git a/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/installed-files.txt b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/installed-files.txt new file mode 100644 index 0000000..dd90553 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/installed-files.txt @@ -0,0 +1,135 @@ +../alembic/context.py +../alembic/config.py +../alembic/command.py +../alembic/op.py +../alembic/__init__.py +../alembic/util/compat.py +../alembic/util/sqla_compat.py +../alembic/util/pyfiles.py +../alembic/util/messaging.py +../alembic/util/__init__.py +../alembic/util/exc.py +../alembic/util/langhelpers.py +../alembic/operations/toimpl.py +../alembic/operations/base.py +../alembic/operations/schemaobj.py +../alembic/operations/ops.py +../alembic/operations/batch.py +../alembic/operations/__init__.py +../alembic/testing/compat.py +../alembic/testing/env.py +../alembic/testing/runner.py +../alembic/testing/fixtures.py +../alembic/testing/config.py +../alembic/testing/requirements.py +../alembic/testing/assertions.py +../alembic/testing/engines.py +../alembic/testing/exclusions.py +../alembic/testing/__init__.py +../alembic/testing/util.py +../alembic/testing/warnings.py +../alembic/testing/provision.py +../alembic/testing/mock.py +../alembic/script/revision.py +../alembic/script/base.py +../alembic/script/__init__.py +../alembic/ddl/oracle.py +../alembic/ddl/mssql.py +../alembic/ddl/mysql.py +../alembic/ddl/impl.py +../alembic/ddl/postgresql.py +../alembic/ddl/sqlite.py +../alembic/ddl/base.py +../alembic/ddl/__init__.py +../alembic/autogenerate/compare.py +../alembic/autogenerate/render.py +../alembic/autogenerate/api.py +../alembic/autogenerate/__init__.py +../alembic/autogenerate/rewriter.py +../alembic/runtime/migration.py +../alembic/runtime/__init__.py +../alembic/runtime/environment.py +../alembic/testing/plugin/noseplugin.py +../alembic/testing/plugin/bootstrap.py +../alembic/testing/plugin/pytestplugin.py +../alembic/testing/plugin/__init__.py +../alembic/testing/plugin/plugin_base.py +../alembic/templates/generic/README +../alembic/templates/generic/alembic.ini.mako +../alembic/templates/generic/env.py +../alembic/templates/generic/script.py.mako +../alembic/templates/multidb/README +../alembic/templates/multidb/alembic.ini.mako +../alembic/templates/multidb/env.py +../alembic/templates/multidb/script.py.mako +../alembic/templates/pylons/README +../alembic/templates/pylons/alembic.ini.mako +../alembic/templates/pylons/env.py +../alembic/templates/pylons/script.py.mako +../alembic/__pycache__/context.cpython-36.pyc +../alembic/__pycache__/config.cpython-36.pyc +../alembic/__pycache__/command.cpython-36.pyc +../alembic/__pycache__/op.cpython-36.pyc +../alembic/__pycache__/__init__.cpython-36.pyc +../alembic/util/__pycache__/compat.cpython-36.pyc +../alembic/util/__pycache__/sqla_compat.cpython-36.pyc +../alembic/util/__pycache__/pyfiles.cpython-36.pyc +../alembic/util/__pycache__/messaging.cpython-36.pyc +../alembic/util/__pycache__/__init__.cpython-36.pyc 
+../alembic/util/__pycache__/exc.cpython-36.pyc +../alembic/util/__pycache__/langhelpers.cpython-36.pyc +../alembic/operations/__pycache__/toimpl.cpython-36.pyc +../alembic/operations/__pycache__/base.cpython-36.pyc +../alembic/operations/__pycache__/schemaobj.cpython-36.pyc +../alembic/operations/__pycache__/ops.cpython-36.pyc +../alembic/operations/__pycache__/batch.cpython-36.pyc +../alembic/operations/__pycache__/__init__.cpython-36.pyc +../alembic/testing/__pycache__/compat.cpython-36.pyc +../alembic/testing/__pycache__/env.cpython-36.pyc +../alembic/testing/__pycache__/runner.cpython-36.pyc +../alembic/testing/__pycache__/fixtures.cpython-36.pyc +../alembic/testing/__pycache__/config.cpython-36.pyc +../alembic/testing/__pycache__/requirements.cpython-36.pyc +../alembic/testing/__pycache__/assertions.cpython-36.pyc +../alembic/testing/__pycache__/engines.cpython-36.pyc +../alembic/testing/__pycache__/exclusions.cpython-36.pyc +../alembic/testing/__pycache__/__init__.cpython-36.pyc +../alembic/testing/__pycache__/util.cpython-36.pyc +../alembic/testing/__pycache__/warnings.cpython-36.pyc +../alembic/testing/__pycache__/provision.cpython-36.pyc +../alembic/testing/__pycache__/mock.cpython-36.pyc +../alembic/script/__pycache__/revision.cpython-36.pyc +../alembic/script/__pycache__/base.cpython-36.pyc +../alembic/script/__pycache__/__init__.cpython-36.pyc +../alembic/ddl/__pycache__/oracle.cpython-36.pyc +../alembic/ddl/__pycache__/mssql.cpython-36.pyc +../alembic/ddl/__pycache__/mysql.cpython-36.pyc +../alembic/ddl/__pycache__/impl.cpython-36.pyc +../alembic/ddl/__pycache__/postgresql.cpython-36.pyc +../alembic/ddl/__pycache__/sqlite.cpython-36.pyc +../alembic/ddl/__pycache__/base.cpython-36.pyc +../alembic/ddl/__pycache__/__init__.cpython-36.pyc +../alembic/autogenerate/__pycache__/compare.cpython-36.pyc +../alembic/autogenerate/__pycache__/render.cpython-36.pyc +../alembic/autogenerate/__pycache__/api.cpython-36.pyc +../alembic/autogenerate/__pycache__/__init__.cpython-36.pyc +../alembic/autogenerate/__pycache__/rewriter.cpython-36.pyc +../alembic/runtime/__pycache__/migration.cpython-36.pyc +../alembic/runtime/__pycache__/__init__.cpython-36.pyc +../alembic/runtime/__pycache__/environment.cpython-36.pyc +../alembic/testing/plugin/__pycache__/noseplugin.cpython-36.pyc +../alembic/testing/plugin/__pycache__/bootstrap.cpython-36.pyc +../alembic/testing/plugin/__pycache__/pytestplugin.cpython-36.pyc +../alembic/testing/plugin/__pycache__/__init__.cpython-36.pyc +../alembic/testing/plugin/__pycache__/plugin_base.cpython-36.pyc +../alembic/templates/generic/__pycache__/env.cpython-36.pyc +../alembic/templates/multidb/__pycache__/env.cpython-36.pyc +../alembic/templates/pylons/__pycache__/env.cpython-36.pyc +PKG-INFO +requires.txt +not-zip-safe +entry_points.txt +top_level.txt +SOURCES.txt +dependency_links.txt +../../../../bin/alembic diff --git a/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/not-zip-safe b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/requires.txt b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/requires.txt new file mode 100644 index 0000000..95ed471 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/requires.txt @@ -0,0 +1,4 @@ +Mako +SQLAlchemy>=0.9.0 +python-dateutil +python-editor>=0.3 
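
The packaging metadata above registers a single console script (alembic = alembic.config:main) plus the dependencies in requires.txt. The same commands can also be driven from Python, roughly as in this sketch; the alembic.ini path and target revision are assumptions for illustration.

    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")           # project configuration; path is assumed
    command.upgrade(cfg, "head")          # apply all pending migrations
    command.current(cfg, verbose=True)    # report the revision(s) the database is at
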
diff --git a/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/top_level.txt b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/top_level.txt new file mode 100644 index 0000000..b5bd98d --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic-1.0.11.egg-info/top_level.txt @@ -0,0 +1 @@ +alembic diff --git a/py3/lib/python3.6/site-packages/alembic/__init__.py b/py3/lib/python3.6/site-packages/alembic/__init__.py new file mode 100644 index 0000000..7fa2e83 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/__init__.py @@ -0,0 +1,14 @@ +from os import path +import sys + +from . import context # noqa +from . import op # noqa +from .runtime import environment +from .runtime import migration + +__version__ = '1.0.11' + +package_dir = path.abspath(path.dirname(__file__)) + +sys.modules["alembic.migration"] = migration +sys.modules["alembic.environment"] = environment diff --git a/py3/lib/python3.6/site-packages/alembic/autogenerate/__init__.py b/py3/lib/python3.6/site-packages/alembic/autogenerate/__init__.py new file mode 100644 index 0000000..a0f8ec2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/autogenerate/__init__.py @@ -0,0 +1,10 @@ +from .api import _render_migration_diffs # noqa +from .api import compare_metadata # noqa +from .api import produce_migrations # noqa +from .api import render_python_code # noqa +from .api import RevisionContext # noqa +from .compare import _produce_net_changes # noqa +from .compare import comparators # noqa +from .render import render_op_text # noqa +from .render import renderers # noqa +from .rewriter import Rewriter # noqa diff --git a/py3/lib/python3.6/site-packages/alembic/autogenerate/api.py b/py3/lib/python3.6/site-packages/alembic/autogenerate/api.py new file mode 100644 index 0000000..709e97d --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/autogenerate/api.py @@ -0,0 +1,504 @@ +"""Provide the 'autogenerate' feature which can produce migration operations +automatically.""" + +import contextlib + +from sqlalchemy.engine.reflection import Inspector + +from . import compare +from . import render +from .. import util +from ..operations import ops + + +def compare_metadata(context, metadata): + """Compare a database schema to that given in a + :class:`~sqlalchemy.schema.MetaData` instance. + + The database connection is presented in the context + of a :class:`.MigrationContext` object, which + provides database connectivity as well as optional + comparison functions to use for datatypes and + server defaults - see the "autogenerate" arguments + at :meth:`.EnvironmentContext.configure` + for details on these. 
+ + The return format is a list of "diff" directives, + each representing individual differences:: + + from alembic.migration import MigrationContext + from alembic.autogenerate import compare_metadata + from sqlalchemy.schema import SchemaItem + from sqlalchemy.types import TypeEngine + from sqlalchemy import (create_engine, MetaData, Column, + Integer, String, Table) + import pprint + + engine = create_engine("sqlite://") + + engine.execute(''' + create table foo ( + id integer not null primary key, + old_data varchar, + x integer + )''') + + engine.execute(''' + create table bar ( + data varchar + )''') + + metadata = MetaData() + Table('foo', metadata, + Column('id', Integer, primary_key=True), + Column('data', Integer), + Column('x', Integer, nullable=False) + ) + Table('bat', metadata, + Column('info', String) + ) + + mc = MigrationContext.configure(engine.connect()) + + diff = compare_metadata(mc, metadata) + pprint.pprint(diff, indent=2, width=20) + + Output:: + + [ ( 'add_table', + Table('bat', MetaData(bind=None), + Column('info', String(), table=), schema=None)), + ( 'remove_table', + Table(u'bar', MetaData(bind=None), + Column(u'data', VARCHAR(), table=), schema=None)), + ( 'add_column', + None, + 'foo', + Column('data', Integer(), table=)), + ( 'remove_column', + None, + 'foo', + Column(u'old_data', VARCHAR(), table=None)), + [ ( 'modify_nullable', + None, + 'foo', + u'x', + { 'existing_server_default': None, + 'existing_type': INTEGER()}, + True, + False)]] + + + :param context: a :class:`.MigrationContext` + instance. + :param metadata: a :class:`~sqlalchemy.schema.MetaData` + instance. + + .. seealso:: + + :func:`.produce_migrations` - produces a :class:`.MigrationScript` + structure based on metadata comparison. + + """ + + migration_script = produce_migrations(context, metadata) + return migration_script.upgrade_ops.as_diffs() + + +def produce_migrations(context, metadata): + """Produce a :class:`.MigrationScript` structure based on schema + comparison. + + This function does essentially what :func:`.compare_metadata` does, + but then runs the resulting list of diffs to produce the full + :class:`.MigrationScript` object. For an example of what this looks like, + see the example in :ref:`customizing_revision`. + + .. versionadded:: 0.8.0 + + .. seealso:: + + :func:`.compare_metadata` - returns more fundamental "diff" + data from comparing a schema. + + """ + + autogen_context = AutogenContext(context, metadata=metadata) + + migration_script = ops.MigrationScript( + rev_id=None, + upgrade_ops=ops.UpgradeOps([]), + downgrade_ops=ops.DowngradeOps([]), + ) + + compare._populate_migration_script(autogen_context, migration_script) + + return migration_script + + +def render_python_code( + up_or_down_op, + sqlalchemy_module_prefix="sa.", + alembic_module_prefix="op.", + render_as_batch=False, + imports=(), + render_item=None, +): + """Render Python code given an :class:`.UpgradeOps` or + :class:`.DowngradeOps` object. + + This is a convenience function that can be used to test the + autogenerate output of a user-defined :class:`.MigrationScript` structure. 
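
As a usage sketch tying these helpers together (the in-memory SQLite URL and the one-table model are assumptions, not part of this repository): produce_migrations() compares a live connection against a MetaData and returns a MigrationScript, and render_python_code() renders its upgrade operations as the Python body that would land in a revision file.

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer
    from alembic.migration import MigrationContext
    from alembic.autogenerate import produce_migrations, render_python_code

    engine = create_engine("sqlite://")
    metadata = MetaData()
    Table("widget", metadata, Column("id", Integer, primary_key=True))

    with engine.connect() as conn:
        mc = MigrationContext.configure(conn)
        script = produce_migrations(mc, metadata)

    # Render the upgrade operations as revision-file Python (a CREATE TABLE here).
    print(render_python_code(script.upgrade_ops))
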
+ + """ + opts = { + "sqlalchemy_module_prefix": sqlalchemy_module_prefix, + "alembic_module_prefix": alembic_module_prefix, + "render_item": render_item, + "render_as_batch": render_as_batch, + } + + autogen_context = AutogenContext(None, opts=opts) + autogen_context.imports = set(imports) + return render._indent( + render._render_cmd_body(up_or_down_op, autogen_context) + ) + + +def _render_migration_diffs(context, template_args): + """legacy, used by test_autogen_composition at the moment""" + + autogen_context = AutogenContext(context) + + upgrade_ops = ops.UpgradeOps([]) + compare._produce_net_changes(autogen_context, upgrade_ops) + + migration_script = ops.MigrationScript( + rev_id=None, + upgrade_ops=upgrade_ops, + downgrade_ops=upgrade_ops.reverse(), + ) + + render._render_python_into_templatevars( + autogen_context, migration_script, template_args + ) + + +class AutogenContext(object): + """Maintains configuration and state that's specific to an + autogenerate operation.""" + + metadata = None + """The :class:`~sqlalchemy.schema.MetaData` object + representing the destination. + + This object is the one that is passed within ``env.py`` + to the :paramref:`.EnvironmentContext.configure.target_metadata` + parameter. It represents the structure of :class:`.Table` and other + objects as stated in the current database model, and represents the + destination structure for the database being examined. + + While the :class:`~sqlalchemy.schema.MetaData` object is primarily + known as a collection of :class:`~sqlalchemy.schema.Table` objects, + it also has an :attr:`~sqlalchemy.schema.MetaData.info` dictionary + that may be used by end-user schemes to store additional schema-level + objects that are to be compared in custom autogeneration schemes. + + """ + + connection = None + """The :class:`~sqlalchemy.engine.base.Connection` object currently + connected to the database backend being compared. + + This is obtained from the :attr:`.MigrationContext.bind` and is + utimately set up in the ``env.py`` script. + + """ + + dialect = None + """The :class:`~sqlalchemy.engine.Dialect` object currently in use. + + This is normally obtained from the + :attr:`~sqlalchemy.engine.base.Connection.dialect` attribute. + + """ + + imports = None + """A ``set()`` which contains string Python import directives. + + The directives are to be rendered into the ``${imports}`` section + of a script template. The set is normally empty and can be modified + within hooks such as the + :paramref:`.EnvironmentContext.configure.render_item` hook. + + .. versionadded:: 0.8.3 + + .. seealso:: + + :ref:`autogen_render_types` + + """ + + migration_context = None + """The :class:`.MigrationContext` established by the ``env.py`` script.""" + + def __init__( + self, migration_context, metadata=None, opts=None, autogenerate=True + ): + + if ( + autogenerate + and migration_context is not None + and migration_context.as_sql + ): + raise util.CommandError( + "autogenerate can't use as_sql=True as it prevents querying " + "the database for schema information" + ) + + if opts is None: + opts = migration_context.opts + + self.metadata = metadata = ( + opts.get("target_metadata", None) if metadata is None else metadata + ) + + if ( + autogenerate + and metadata is None + and migration_context is not None + and migration_context.script is not None + ): + raise util.CommandError( + "Can't proceed with --autogenerate option; environment " + "script %s does not provide " + "a MetaData object or sequence of objects to the context." 
+ % (migration_context.script.env_py_location) + ) + + include_symbol = opts.get("include_symbol", None) + include_object = opts.get("include_object", None) + + object_filters = [] + if include_symbol: + + def include_symbol_filter( + object_, name, type_, reflected, compare_to + ): + if type_ == "table": + return include_symbol(name, object_.schema) + else: + return True + + object_filters.append(include_symbol_filter) + if include_object: + object_filters.append(include_object) + + self._object_filters = object_filters + + self.migration_context = migration_context + if self.migration_context is not None: + self.connection = self.migration_context.bind + self.dialect = self.migration_context.dialect + + self.imports = set() + self.opts = opts + self._has_batch = False + + @util.memoized_property + def inspector(self): + return Inspector.from_engine(self.connection) + + @contextlib.contextmanager + def _within_batch(self): + self._has_batch = True + yield + self._has_batch = False + + def run_filters(self, object_, name, type_, reflected, compare_to): + """Run the context's object filters and return True if the targets + should be part of the autogenerate operation. + + This method should be run for every kind of object encountered within + an autogenerate operation, giving the environment the chance + to filter what objects should be included in the comparison. + The filters here are produced directly via the + :paramref:`.EnvironmentContext.configure.include_object` + and :paramref:`.EnvironmentContext.configure.include_symbol` + functions, if present. + + """ + for fn in self._object_filters: + if not fn(object_, name, type_, reflected, compare_to): + return False + else: + return True + + @util.memoized_property + def sorted_tables(self): + """Return an aggregate of the :attr:`.MetaData.sorted_tables` collection(s). + + For a sequence of :class:`.MetaData` objects, this + concatenates the :attr:`.MetaData.sorted_tables` collection + for each individual :class:`.MetaData` in the order of the + sequence. It does **not** collate the sorted tables collections. + + .. versionadded:: 0.9.0 + + """ + result = [] + for m in util.to_list(self.metadata): + result.extend(m.sorted_tables) + return result + + @util.memoized_property + def table_key_to_table(self): + """Return an aggregate of the :attr:`.MetaData.tables` dictionaries. + + The :attr:`.MetaData.tables` collection is a dictionary of table key + to :class:`.Table`; this method aggregates the dictionary across + multiple :class:`.MetaData` objects into one dictionary. + + Duplicate table keys are **not** supported; if two :class:`.MetaData` + objects contain the same table key, an exception is raised. + + .. 
versionadded:: 0.9.0 + + """ + result = {} + for m in util.to_list(self.metadata): + intersect = set(result).intersection(set(m.tables)) + if intersect: + raise ValueError( + "Duplicate table keys across multiple " + "MetaData objects: %s" + % (", ".join('"%s"' % key for key in sorted(intersect))) + ) + + result.update(m.tables) + return result + + +class RevisionContext(object): + """Maintains configuration and state that's specific to a revision + file generation operation.""" + + def __init__( + self, + config, + script_directory, + command_args, + process_revision_directives=None, + ): + self.config = config + self.script_directory = script_directory + self.command_args = command_args + self.process_revision_directives = process_revision_directives + self.template_args = { + "config": config # Let templates use config for + # e.g. multiple databases + } + self.generated_revisions = [self._default_revision()] + + def _to_script(self, migration_script): + template_args = {} + for k, v in self.template_args.items(): + template_args.setdefault(k, v) + + if getattr(migration_script, "_needs_render", False): + autogen_context = self._last_autogen_context + + # clear out existing imports if we are doing multiple + # renders + autogen_context.imports = set() + if migration_script.imports: + autogen_context.imports.update(migration_script.imports) + render._render_python_into_templatevars( + autogen_context, migration_script, template_args + ) + + return self.script_directory.generate_revision( + migration_script.rev_id, + migration_script.message, + refresh=True, + head=migration_script.head, + splice=migration_script.splice, + branch_labels=migration_script.branch_label, + version_path=migration_script.version_path, + depends_on=migration_script.depends_on, + **template_args + ) + + def run_autogenerate(self, rev, migration_context): + self._run_environment(rev, migration_context, True) + + def run_no_autogenerate(self, rev, migration_context): + self._run_environment(rev, migration_context, False) + + def _run_environment(self, rev, migration_context, autogenerate): + if autogenerate: + if self.command_args["sql"]: + raise util.CommandError( + "Using --sql with --autogenerate does not make any sense" + ) + if set(self.script_directory.get_revisions(rev)) != set( + self.script_directory.get_revisions("heads") + ): + raise util.CommandError("Target database is not up to date.") + + upgrade_token = migration_context.opts["upgrade_token"] + downgrade_token = migration_context.opts["downgrade_token"] + + migration_script = self.generated_revisions[-1] + if not getattr(migration_script, "_needs_render", False): + migration_script.upgrade_ops_list[-1].upgrade_token = upgrade_token + migration_script.downgrade_ops_list[ + -1 + ].downgrade_token = downgrade_token + migration_script._needs_render = True + else: + migration_script._upgrade_ops.append( + ops.UpgradeOps([], upgrade_token=upgrade_token) + ) + migration_script._downgrade_ops.append( + ops.DowngradeOps([], downgrade_token=downgrade_token) + ) + + self._last_autogen_context = autogen_context = AutogenContext( + migration_context, autogenerate=autogenerate + ) + + if autogenerate: + compare._populate_migration_script( + autogen_context, migration_script + ) + + if self.process_revision_directives: + self.process_revision_directives( + migration_context, rev, self.generated_revisions + ) + + hook = migration_context.opts["process_revision_directives"] + if hook: + hook(migration_context, rev, self.generated_revisions) + + for migration_script 
in self.generated_revisions: + migration_script._needs_render = True + + def _default_revision(self): + op = ops.MigrationScript( + rev_id=self.command_args["rev_id"] or util.rev_id(), + message=self.command_args["message"], + upgrade_ops=ops.UpgradeOps([]), + downgrade_ops=ops.DowngradeOps([]), + head=self.command_args["head"], + splice=self.command_args["splice"], + branch_label=self.command_args["branch_label"], + version_path=self.command_args["version_path"], + depends_on=self.command_args["depends_on"], + ) + return op + + def generate_scripts(self): + for generated_revision in self.generated_revisions: + yield self._to_script(generated_revision) diff --git a/py3/lib/python3.6/site-packages/alembic/autogenerate/compare.py b/py3/lib/python3.6/site-packages/alembic/autogenerate/compare.py new file mode 100644 index 0000000..37371eb --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/autogenerate/compare.py @@ -0,0 +1,1097 @@ +import contextlib +import logging +import re + +from sqlalchemy import event +from sqlalchemy import schema as sa_schema +from sqlalchemy import types as sqltypes +from sqlalchemy.engine.reflection import Inspector +from sqlalchemy.util import OrderedSet + +from alembic.ddl.base import _fk_spec +from .render import _user_defined_render +from .. import util +from ..operations import ops +from ..util import compat +from ..util import sqla_compat + +log = logging.getLogger(__name__) + + +def _populate_migration_script(autogen_context, migration_script): + upgrade_ops = migration_script.upgrade_ops_list[-1] + downgrade_ops = migration_script.downgrade_ops_list[-1] + + _produce_net_changes(autogen_context, upgrade_ops) + upgrade_ops.reverse_into(downgrade_ops) + + +comparators = util.Dispatcher(uselist=True) + + +def _produce_net_changes(autogen_context, upgrade_ops): + + connection = autogen_context.connection + include_schemas = autogen_context.opts.get("include_schemas", False) + + inspector = Inspector.from_engine(connection) + + default_schema = connection.dialect.default_schema_name + if include_schemas: + schemas = set(inspector.get_schema_names()) + # replace default schema name with None + schemas.discard("information_schema") + # replace the "default" schema with None + schemas.discard(default_schema) + schemas.add(None) + else: + schemas = [None] + + comparators.dispatch("schema", autogen_context.dialect.name)( + autogen_context, upgrade_ops, schemas + ) + + +@comparators.dispatch_for("schema") +def _autogen_for_tables(autogen_context, upgrade_ops, schemas): + inspector = autogen_context.inspector + + conn_table_names = set() + + version_table_schema = ( + autogen_context.migration_context.version_table_schema + ) + version_table = autogen_context.migration_context.version_table + + for s in schemas: + tables = set(inspector.get_table_names(schema=s)) + if s == version_table_schema: + tables = tables.difference( + [autogen_context.migration_context.version_table] + ) + conn_table_names.update(zip([s] * len(tables), tables)) + + metadata_table_names = OrderedSet( + [(table.schema, table.name) for table in autogen_context.sorted_tables] + ).difference([(version_table_schema, version_table)]) + + _compare_tables( + conn_table_names, + metadata_table_names, + inspector, + upgrade_ops, + autogen_context, + ) + + +def _compare_tables( + conn_table_names, + metadata_table_names, + inspector, + upgrade_ops, + autogen_context, +): + + default_schema = inspector.bind.dialect.default_schema_name + + # tables coming from the connection will not have "schema" + # 
set if it matches default_schema_name; so we need a list + # of table names from local metadata that also have "None" if schema + # == default_schema_name. Most setups will be like this anyway but + # some are not (see #170) + metadata_table_names_no_dflt_schema = OrderedSet( + [ + (schema if schema != default_schema else None, tname) + for schema, tname in metadata_table_names + ] + ) + + # to adjust for the MetaData collection storing the tables either + # as "schemaname.tablename" or just "tablename", create a new lookup + # which will match the "non-default-schema" keys to the Table object. + tname_to_table = dict( + ( + no_dflt_schema, + autogen_context.table_key_to_table[ + sa_schema._get_table_key(tname, schema) + ], + ) + for no_dflt_schema, (schema, tname) in zip( + metadata_table_names_no_dflt_schema, metadata_table_names + ) + ) + metadata_table_names = metadata_table_names_no_dflt_schema + + for s, tname in metadata_table_names.difference(conn_table_names): + name = "%s.%s" % (s, tname) if s else tname + metadata_table = tname_to_table[(s, tname)] + if autogen_context.run_filters( + metadata_table, tname, "table", False, None + ): + upgrade_ops.ops.append( + ops.CreateTableOp.from_table(metadata_table) + ) + log.info("Detected added table %r", name) + modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) + + comparators.dispatch("table")( + autogen_context, + modify_table_ops, + s, + tname, + None, + metadata_table, + ) + if not modify_table_ops.is_empty(): + upgrade_ops.ops.append(modify_table_ops) + + removal_metadata = sa_schema.MetaData() + for s, tname in conn_table_names.difference(metadata_table_names): + name = sa_schema._get_table_key(tname, s) + exists = name in removal_metadata.tables + t = sa_schema.Table(tname, removal_metadata, schema=s) + + if not exists: + event.listen( + t, + "column_reflect", + # fmt: off + autogen_context.migration_context.impl. + _compat_autogen_column_reflect + (inspector), + # fmt: on + ) + inspector.reflecttable(t, None) + if autogen_context.run_filters(t, tname, "table", True, None): + + modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) + + comparators.dispatch("table")( + autogen_context, modify_table_ops, s, tname, t, None + ) + if not modify_table_ops.is_empty(): + upgrade_ops.ops.append(modify_table_ops) + + upgrade_ops.ops.append(ops.DropTableOp.from_table(t)) + log.info("Detected removed table %r", name) + + existing_tables = conn_table_names.intersection(metadata_table_names) + + existing_metadata = sa_schema.MetaData() + conn_column_info = {} + for s, tname in existing_tables: + name = sa_schema._get_table_key(tname, s) + exists = name in existing_metadata.tables + t = sa_schema.Table(tname, existing_metadata, schema=s) + if not exists: + event.listen( + t, + "column_reflect", + # fmt: off + autogen_context.migration_context.impl. 
+ _compat_autogen_column_reflect(inspector), + # fmt: on + ) + inspector.reflecttable(t, None) + conn_column_info[(s, tname)] = t + + for s, tname in sorted(existing_tables, key=lambda x: (x[0] or "", x[1])): + s = s or None + name = "%s.%s" % (s, tname) if s else tname + metadata_table = tname_to_table[(s, tname)] + conn_table = existing_metadata.tables[name] + + if autogen_context.run_filters( + metadata_table, tname, "table", False, conn_table + ): + + modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) + with _compare_columns( + s, + tname, + conn_table, + metadata_table, + modify_table_ops, + autogen_context, + inspector, + ): + + comparators.dispatch("table")( + autogen_context, + modify_table_ops, + s, + tname, + conn_table, + metadata_table, + ) + + if not modify_table_ops.is_empty(): + upgrade_ops.ops.append(modify_table_ops) + + +def _make_index(params, conn_table): + ix = sa_schema.Index( + params["name"], + *[conn_table.c[cname] for cname in params["column_names"]], + unique=params["unique"] + ) + if "duplicates_constraint" in params: + ix.info["duplicates_constraint"] = params["duplicates_constraint"] + return ix + + +def _make_unique_constraint(params, conn_table): + uq = sa_schema.UniqueConstraint( + *[conn_table.c[cname] for cname in params["column_names"]], + name=params["name"] + ) + if "duplicates_index" in params: + uq.info["duplicates_index"] = params["duplicates_index"] + + return uq + + +def _make_foreign_key(params, conn_table): + tname = params["referred_table"] + if params["referred_schema"]: + tname = "%s.%s" % (params["referred_schema"], tname) + + options = params.get("options", {}) + + const = sa_schema.ForeignKeyConstraint( + [conn_table.c[cname] for cname in params["constrained_columns"]], + ["%s.%s" % (tname, n) for n in params["referred_columns"]], + onupdate=options.get("onupdate"), + ondelete=options.get("ondelete"), + deferrable=options.get("deferrable"), + initially=options.get("initially"), + name=params["name"], + ) + # needed by 0.7 + conn_table.append_constraint(const) + return const + + +@contextlib.contextmanager +def _compare_columns( + schema, + tname, + conn_table, + metadata_table, + modify_table_ops, + autogen_context, + inspector, +): + name = "%s.%s" % (schema, tname) if schema else tname + metadata_cols_by_name = dict( + (c.name, c) for c in metadata_table.c if not c.system + ) + conn_col_names = dict((c.name, c) for c in conn_table.c) + metadata_col_names = OrderedSet(sorted(metadata_cols_by_name)) + + for cname in metadata_col_names.difference(conn_col_names): + if autogen_context.run_filters( + metadata_cols_by_name[cname], cname, "column", False, None + ): + modify_table_ops.ops.append( + ops.AddColumnOp.from_column_and_tablename( + schema, tname, metadata_cols_by_name[cname] + ) + ) + log.info("Detected added column '%s.%s'", name, cname) + + for colname in metadata_col_names.intersection(conn_col_names): + metadata_col = metadata_cols_by_name[colname] + conn_col = conn_table.c[colname] + if not autogen_context.run_filters( + metadata_col, colname, "column", False, conn_col + ): + continue + alter_column_op = ops.AlterColumnOp(tname, colname, schema=schema) + + comparators.dispatch("column")( + autogen_context, + alter_column_op, + schema, + tname, + colname, + conn_col, + metadata_col, + ) + + if alter_column_op.has_changes(): + modify_table_ops.ops.append(alter_column_op) + + yield + + for cname in set(conn_col_names).difference(metadata_col_names): + if autogen_context.run_filters( + conn_table.c[cname], cname, "column", 
True, None + ): + modify_table_ops.ops.append( + ops.DropColumnOp.from_column_and_tablename( + schema, tname, conn_table.c[cname] + ) + ) + log.info("Detected removed column '%s.%s'", name, cname) + + +class _constraint_sig(object): + def md_name_to_sql_name(self, context): + return self.name + + def __eq__(self, other): + return self.const == other.const + + def __ne__(self, other): + return self.const != other.const + + def __hash__(self): + return hash(self.const) + + +class _uq_constraint_sig(_constraint_sig): + is_index = False + is_unique = True + + def __init__(self, const): + self.const = const + self.name = const.name + self.sig = tuple(sorted([col.name for col in const.columns])) + + @property + def column_names(self): + return [col.name for col in self.const.columns] + + +class _ix_constraint_sig(_constraint_sig): + is_index = True + + def __init__(self, const): + self.const = const + self.name = const.name + self.sig = tuple(sorted([col.name for col in const.columns])) + self.is_unique = bool(const.unique) + + def md_name_to_sql_name(self, context): + return sqla_compat._get_index_final_name(context.dialect, self.const) + + @property + def column_names(self): + return sqla_compat._get_index_column_names(self.const) + + +class _fk_constraint_sig(_constraint_sig): + def __init__(self, const, include_options=False): + self.const = const + self.name = const.name + + ( + self.source_schema, + self.source_table, + self.source_columns, + self.target_schema, + self.target_table, + self.target_columns, + onupdate, + ondelete, + deferrable, + initially, + ) = _fk_spec(const) + + self.sig = ( + self.source_schema, + self.source_table, + tuple(self.source_columns), + self.target_schema, + self.target_table, + tuple(self.target_columns), + ) + if include_options: + self.sig += ( + (None if onupdate.lower() == "no action" else onupdate.lower()) + if onupdate + else None, + (None if ondelete.lower() == "no action" else ondelete.lower()) + if ondelete + else None, + # convert initially + deferrable into one three-state value + "initially_deferrable" + if initially and initially.lower() == "deferred" + else "deferrable" + if deferrable + else "not deferrable", + ) + + +@comparators.dispatch_for("table") +def _compare_indexes_and_uniques( + autogen_context, modify_ops, schema, tname, conn_table, metadata_table +): + + inspector = autogen_context.inspector + is_create_table = conn_table is None + is_drop_table = metadata_table is None + + # 1a. get raw indexes and unique constraints from metadata ... + if metadata_table is not None: + metadata_unique_constraints = set( + uq + for uq in metadata_table.constraints + if isinstance(uq, sa_schema.UniqueConstraint) + ) + metadata_indexes = set(metadata_table.indexes) + else: + metadata_unique_constraints = set() + metadata_indexes = set() + + conn_uniques = conn_indexes = frozenset() + + supports_unique_constraints = False + + unique_constraints_duplicate_unique_indexes = False + + if conn_table is not None: + # 1b. ... 
and from connection, if the table exists + if hasattr(inspector, "get_unique_constraints"): + try: + conn_uniques = inspector.get_unique_constraints( + tname, schema=schema + ) + supports_unique_constraints = True + except NotImplementedError: + pass + except TypeError: + # number of arguments is off for the base + # method in SQLAlchemy due to the cache decorator + # not being present + pass + else: + for uq in conn_uniques: + if uq.get("duplicates_index"): + unique_constraints_duplicate_unique_indexes = True + try: + conn_indexes = inspector.get_indexes(tname, schema=schema) + except NotImplementedError: + pass + + # 2. convert conn-level objects from raw inspector records + # into schema objects + if is_drop_table: + # for DROP TABLE uniques are inline, don't need them + conn_uniques = set() + else: + conn_uniques = set( + _make_unique_constraint(uq_def, conn_table) + for uq_def in conn_uniques + ) + + conn_indexes = set(_make_index(ix, conn_table) for ix in conn_indexes) + + # 2a. if the dialect dupes unique indexes as unique constraints + # (mysql and oracle), correct for that + + if unique_constraints_duplicate_unique_indexes: + _correct_for_uq_duplicates_uix( + conn_uniques, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + ) + + # 3. give the dialect a chance to omit indexes and constraints that + # we know are either added implicitly by the DB or that the DB + # can't accurately report on + autogen_context.migration_context.impl.correct_for_autogen_constraints( + conn_uniques, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + ) + + # 4. organize the constraints into "signature" collections, the + # _constraint_sig() objects provide a consistent facade over both + # Index and UniqueConstraint so we can easily work with them + # interchangeably + metadata_unique_constraints = set( + _uq_constraint_sig(uq) for uq in metadata_unique_constraints + ) + + metadata_indexes = set(_ix_constraint_sig(ix) for ix in metadata_indexes) + + conn_unique_constraints = set( + _uq_constraint_sig(uq) for uq in conn_uniques + ) + + conn_indexes = set(_ix_constraint_sig(ix) for ix in conn_indexes) + + # 5. index things by name, for those objects that have names + metadata_names = dict( + (c.md_name_to_sql_name(autogen_context), c) + for c in metadata_unique_constraints.union(metadata_indexes) + if c.name is not None + ) + + conn_uniques_by_name = dict((c.name, c) for c in conn_unique_constraints) + conn_indexes_by_name = dict((c.name, c) for c in conn_indexes) + + conn_names = dict( + (c.name, c) + for c in conn_unique_constraints.union(conn_indexes) + if c.name is not None + ) + + doubled_constraints = dict( + (name, (conn_uniques_by_name[name], conn_indexes_by_name[name])) + for name in set(conn_uniques_by_name).intersection( + conn_indexes_by_name + ) + ) + + # 6. index things by "column signature", to help with unnamed unique + # constraints. + conn_uniques_by_sig = dict((uq.sig, uq) for uq in conn_unique_constraints) + metadata_uniques_by_sig = dict( + (uq.sig, uq) for uq in metadata_unique_constraints + ) + metadata_indexes_by_sig = dict((ix.sig, ix) for ix in metadata_indexes) + unnamed_metadata_uniques = dict( + (uq.sig, uq) for uq in metadata_unique_constraints if uq.name is None + ) + + # assumptions: + # 1. a unique constraint or an index from the connection *always* + # has a name. + # 2. an index on the metadata side *always* has a name. + # 3. a unique constraint on the metadata side *might* have a name. + # 4. 
The backend may double up indexes as unique constraints and + # vice versa (e.g. MySQL, Postgresql) + + def obj_added(obj): + if obj.is_index: + if autogen_context.run_filters( + obj.const, obj.name, "index", False, None + ): + modify_ops.ops.append(ops.CreateIndexOp.from_index(obj.const)) + log.info( + "Detected added index '%s' on %s", + obj.name, + ", ".join(["'%s'" % obj.column_names]), + ) + else: + if not supports_unique_constraints: + # can't report unique indexes as added if we don't + # detect them + return + if is_create_table or is_drop_table: + # unique constraints are created inline with table defs + return + if autogen_context.run_filters( + obj.const, obj.name, "unique_constraint", False, None + ): + modify_ops.ops.append( + ops.AddConstraintOp.from_constraint(obj.const) + ) + log.info( + "Detected added unique constraint '%s' on %s", + obj.name, + ", ".join(["'%s'" % obj.column_names]), + ) + + def obj_removed(obj): + if obj.is_index: + if obj.is_unique and not supports_unique_constraints: + # many databases double up unique constraints + # as unique indexes. without that list we can't + # be sure what we're doing here + return + + if autogen_context.run_filters( + obj.const, obj.name, "index", True, None + ): + modify_ops.ops.append(ops.DropIndexOp.from_index(obj.const)) + log.info( + "Detected removed index '%s' on '%s'", obj.name, tname + ) + else: + if is_create_table or is_drop_table: + # if the whole table is being dropped, we don't need to + # consider unique constraint separately + return + if autogen_context.run_filters( + obj.const, obj.name, "unique_constraint", True, None + ): + modify_ops.ops.append( + ops.DropConstraintOp.from_constraint(obj.const) + ) + log.info( + "Detected removed unique constraint '%s' on '%s'", + obj.name, + tname, + ) + + def obj_changed(old, new, msg): + if old.is_index: + if autogen_context.run_filters( + new.const, new.name, "index", False, old.const + ): + log.info( + "Detected changed index '%s' on '%s':%s", + old.name, + tname, + ", ".join(msg), + ) + modify_ops.ops.append(ops.DropIndexOp.from_index(old.const)) + modify_ops.ops.append(ops.CreateIndexOp.from_index(new.const)) + else: + if autogen_context.run_filters( + new.const, new.name, "unique_constraint", False, old.const + ): + log.info( + "Detected changed unique constraint '%s' on '%s':%s", + old.name, + tname, + ", ".join(msg), + ) + modify_ops.ops.append( + ops.DropConstraintOp.from_constraint(old.const) + ) + modify_ops.ops.append( + ops.AddConstraintOp.from_constraint(new.const) + ) + + for added_name in sorted(set(metadata_names).difference(conn_names)): + obj = metadata_names[added_name] + obj_added(obj) + + for existing_name in sorted(set(metadata_names).intersection(conn_names)): + metadata_obj = metadata_names[existing_name] + + if existing_name in doubled_constraints: + conn_uq, conn_idx = doubled_constraints[existing_name] + if metadata_obj.is_index: + conn_obj = conn_idx + else: + conn_obj = conn_uq + else: + conn_obj = conn_names[existing_name] + + if conn_obj.is_index != metadata_obj.is_index: + obj_removed(conn_obj) + obj_added(metadata_obj) + else: + msg = [] + if conn_obj.is_unique != metadata_obj.is_unique: + msg.append( + " unique=%r to unique=%r" + % (conn_obj.is_unique, metadata_obj.is_unique) + ) + if conn_obj.sig != metadata_obj.sig: + msg.append( + " columns %r to %r" % (conn_obj.sig, metadata_obj.sig) + ) + + if msg: + obj_changed(conn_obj, metadata_obj, msg) + + for removed_name in sorted(set(conn_names).difference(metadata_names)): + conn_obj = 
conn_names[removed_name] + if not conn_obj.is_index and conn_obj.sig in unnamed_metadata_uniques: + continue + elif removed_name in doubled_constraints: + if ( + conn_obj.sig not in metadata_indexes_by_sig + and conn_obj.sig not in metadata_uniques_by_sig + ): + conn_uq, conn_idx = doubled_constraints[removed_name] + obj_removed(conn_uq) + obj_removed(conn_idx) + else: + obj_removed(conn_obj) + + for uq_sig in unnamed_metadata_uniques: + if uq_sig not in conn_uniques_by_sig: + obj_added(unnamed_metadata_uniques[uq_sig]) + + +def _correct_for_uq_duplicates_uix( + conn_unique_constraints, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, +): + # dedupe unique indexes vs. constraints, since MySQL / Oracle + # doesn't really have unique constraints as a separate construct. + # but look in the metadata and try to maintain constructs + # that already seem to be defined one way or the other + # on that side. This logic was formerly local to MySQL dialect, + # generalized to Oracle and others. See #276 + metadata_uq_names = set( + [ + cons.name + for cons in metadata_unique_constraints + if cons.name is not None + ] + ) + + unnamed_metadata_uqs = set( + [ + _uq_constraint_sig(cons).sig + for cons in metadata_unique_constraints + if cons.name is None + ] + ) + + metadata_ix_names = set( + [cons.name for cons in metadata_indexes if cons.unique] + ) + conn_ix_names = dict( + (cons.name, cons) for cons in conn_indexes if cons.unique + ) + + uqs_dupe_indexes = dict( + (cons.name, cons) + for cons in conn_unique_constraints + if cons.info["duplicates_index"] + ) + + for overlap in uqs_dupe_indexes: + if overlap not in metadata_uq_names: + if ( + _uq_constraint_sig(uqs_dupe_indexes[overlap]).sig + not in unnamed_metadata_uqs + ): + + conn_unique_constraints.discard(uqs_dupe_indexes[overlap]) + elif overlap not in metadata_ix_names: + conn_indexes.discard(conn_ix_names[overlap]) + + +@comparators.dispatch_for("column") +def _compare_nullable( + autogen_context, + alter_column_op, + schema, + tname, + cname, + conn_col, + metadata_col, +): + + # work around SQLAlchemy issue #3023 + if metadata_col.primary_key: + return + + metadata_col_nullable = metadata_col.nullable + conn_col_nullable = conn_col.nullable + alter_column_op.existing_nullable = conn_col_nullable + + if conn_col_nullable is not metadata_col_nullable: + alter_column_op.modify_nullable = metadata_col_nullable + log.info( + "Detected %s on column '%s.%s'", + "NULL" if metadata_col_nullable else "NOT NULL", + tname, + cname, + ) + + +@comparators.dispatch_for("column") +def _setup_autoincrement( + autogen_context, + alter_column_op, + schema, + tname, + cname, + conn_col, + metadata_col, +): + + if metadata_col.table._autoincrement_column is metadata_col: + alter_column_op.kw["autoincrement"] = True + elif util.sqla_110 and metadata_col.autoincrement is True: + alter_column_op.kw["autoincrement"] = True + elif metadata_col.autoincrement is False: + alter_column_op.kw["autoincrement"] = False + + +@comparators.dispatch_for("column") +def _compare_type( + autogen_context, + alter_column_op, + schema, + tname, + cname, + conn_col, + metadata_col, +): + + conn_type = conn_col.type + alter_column_op.existing_type = conn_type + metadata_type = metadata_col.type + if conn_type._type_affinity is sqltypes.NullType: + log.info( + "Couldn't determine database type " "for column '%s.%s'", + tname, + cname, + ) + return + if metadata_type._type_affinity is sqltypes.NullType: + log.info( + "Column '%s.%s' has no type within " "the model; 
can't compare", + tname, + cname, + ) + return + + isdiff = autogen_context.migration_context._compare_type( + conn_col, metadata_col + ) + + if isdiff: + alter_column_op.modify_type = metadata_type + log.info( + "Detected type change from %r to %r on '%s.%s'", + conn_type, + metadata_type, + tname, + cname, + ) + + +def _render_server_default_for_compare( + metadata_default, metadata_col, autogen_context +): + rendered = _user_defined_render( + "server_default", metadata_default, autogen_context + ) + if rendered is not False: + return rendered + + if isinstance(metadata_default, sa_schema.DefaultClause): + if isinstance(metadata_default.arg, compat.string_types): + metadata_default = metadata_default.arg + else: + metadata_default = str( + metadata_default.arg.compile( + dialect=autogen_context.dialect, + compile_kwargs={"literal_binds": True}, + ) + ) + if isinstance(metadata_default, compat.string_types): + if metadata_col.type._type_affinity is sqltypes.String: + metadata_default = re.sub(r"^'|'$", "", metadata_default) + return repr(metadata_default) + else: + return metadata_default + else: + return None + + +@comparators.dispatch_for("column") +def _compare_server_default( + autogen_context, + alter_column_op, + schema, + tname, + cname, + conn_col, + metadata_col, +): + + metadata_default = metadata_col.server_default + conn_col_default = conn_col.server_default + if conn_col_default is None and metadata_default is None: + return False + rendered_metadata_default = _render_server_default_for_compare( + metadata_default, metadata_col, autogen_context + ) + + rendered_conn_default = ( + conn_col.server_default.arg.text if conn_col.server_default else None + ) + + alter_column_op.existing_server_default = conn_col_default + + isdiff = autogen_context.migration_context._compare_server_default( + conn_col, + metadata_col, + rendered_metadata_default, + rendered_conn_default, + ) + if isdiff: + alter_column_op.modify_server_default = metadata_default + log.info("Detected server default on column '%s.%s'", tname, cname) + + +@comparators.dispatch_for("column") +def _compare_column_comment( + autogen_context, + alter_column_op, + schema, + tname, + cname, + conn_col, + metadata_col, +): + + if not sqla_compat._dialect_supports_comments(autogen_context.dialect): + return + + metadata_comment = metadata_col.comment + conn_col_comment = conn_col.comment + if conn_col_comment is None and metadata_comment is None: + return False + + alter_column_op.existing_comment = conn_col_comment + + if conn_col_comment != metadata_comment: + alter_column_op.modify_comment = metadata_comment + log.info("Detected column comment '%s.%s'", tname, cname) + + +@comparators.dispatch_for("table") +def _compare_foreign_keys( + autogen_context, + modify_table_ops, + schema, + tname, + conn_table, + metadata_table, +): + + # if we're doing CREATE TABLE, all FKs are created + # inline within the table def + if conn_table is None or metadata_table is None: + return + + inspector = autogen_context.inspector + metadata_fks = set( + fk + for fk in metadata_table.constraints + if isinstance(fk, sa_schema.ForeignKeyConstraint) + ) + + conn_fks = inspector.get_foreign_keys(tname, schema=schema) + + backend_reflects_fk_options = conn_fks and "options" in conn_fks[0] + + conn_fks = set(_make_foreign_key(const, conn_table) for const in conn_fks) + + # give the dialect a chance to correct the FKs to match more + # closely + autogen_context.migration_context.impl.correct_for_autogen_foreignkeys( + conn_fks, metadata_fks + ) + + 
metadata_fks = set( + _fk_constraint_sig(fk, include_options=backend_reflects_fk_options) + for fk in metadata_fks + ) + + conn_fks = set( + _fk_constraint_sig(fk, include_options=backend_reflects_fk_options) + for fk in conn_fks + ) + + conn_fks_by_sig = dict((c.sig, c) for c in conn_fks) + metadata_fks_by_sig = dict((c.sig, c) for c in metadata_fks) + + metadata_fks_by_name = dict( + (c.name, c) for c in metadata_fks if c.name is not None + ) + conn_fks_by_name = dict( + (c.name, c) for c in conn_fks if c.name is not None + ) + + def _add_fk(obj, compare_to): + if autogen_context.run_filters( + obj.const, obj.name, "foreign_key_constraint", False, compare_to + ): + modify_table_ops.ops.append( + ops.CreateForeignKeyOp.from_constraint(const.const) + ) + + log.info( + "Detected added foreign key (%s)(%s) on table %s%s", + ", ".join(obj.source_columns), + ", ".join(obj.target_columns), + "%s." % obj.source_schema if obj.source_schema else "", + obj.source_table, + ) + + def _remove_fk(obj, compare_to): + if autogen_context.run_filters( + obj.const, obj.name, "foreign_key_constraint", True, compare_to + ): + modify_table_ops.ops.append( + ops.DropConstraintOp.from_constraint(obj.const) + ) + log.info( + "Detected removed foreign key (%s)(%s) on table %s%s", + ", ".join(obj.source_columns), + ", ".join(obj.target_columns), + "%s." % obj.source_schema if obj.source_schema else "", + obj.source_table, + ) + + # so far it appears we don't need to do this by name at all. + # SQLite doesn't preserve constraint names anyway + + for removed_sig in set(conn_fks_by_sig).difference(metadata_fks_by_sig): + const = conn_fks_by_sig[removed_sig] + if removed_sig not in metadata_fks_by_sig: + compare_to = ( + metadata_fks_by_name[const.name].const + if const.name in metadata_fks_by_name + else None + ) + _remove_fk(const, compare_to) + + for added_sig in set(metadata_fks_by_sig).difference(conn_fks_by_sig): + const = metadata_fks_by_sig[added_sig] + if added_sig not in conn_fks_by_sig: + compare_to = ( + conn_fks_by_name[const.name].const + if const.name in conn_fks_by_name + else None + ) + _add_fk(const, compare_to) + + +@comparators.dispatch_for("table") +def _compare_table_comment( + autogen_context, + modify_table_ops, + schema, + tname, + conn_table, + metadata_table, +): + + if not sqla_compat._dialect_supports_comments(autogen_context.dialect): + return + + # if we're doing CREATE TABLE, comments will be created inline + # with the create_table op. + if conn_table is None or metadata_table is None: + return + + if conn_table.comment is None and metadata_table.comment is None: + return + + if metadata_table.comment is None and conn_table.comment is not None: + modify_table_ops.ops.append( + ops.DropTableCommentOp( + tname, existing_comment=conn_table.comment, schema=schema + ) + ) + elif metadata_table.comment != conn_table.comment: + modify_table_ops.ops.append( + ops.CreateTableCommentOp( + tname, + metadata_table.comment, + existing_comment=conn_table.comment, + schema=schema, + ) + ) diff --git a/py3/lib/python3.6/site-packages/alembic/autogenerate/render.py b/py3/lib/python3.6/site-packages/alembic/autogenerate/render.py new file mode 100644 index 0000000..0e3b2d8 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/autogenerate/render.py @@ -0,0 +1,884 @@ +import re + +from mako.pygen import PythonPrinter +from sqlalchemy import schema as sa_schema +from sqlalchemy import sql +from sqlalchemy import types as sqltypes + +from .. 
import util +from ..operations import ops +from ..util import compat +from ..util import sqla_compat +from ..util.compat import string_types +from ..util.compat import StringIO + + +MAX_PYTHON_ARGS = 255 + +try: + from sqlalchemy.sql.naming import conv + + def _render_gen_name(autogen_context, name): + if isinstance(name, conv): + return _f_name(_alembic_autogenerate_prefix(autogen_context), name) + else: + return name + + +except ImportError: + + def _render_gen_name(autogen_context, name): + return name + + +def _indent(text): + text = re.compile(r"^", re.M).sub(" ", text).strip() + text = re.compile(r" +$", re.M).sub("", text) + return text + + +def _render_python_into_templatevars( + autogen_context, migration_script, template_args +): + imports = autogen_context.imports + + for upgrade_ops, downgrade_ops in zip( + migration_script.upgrade_ops_list, migration_script.downgrade_ops_list + ): + template_args[upgrade_ops.upgrade_token] = _indent( + _render_cmd_body(upgrade_ops, autogen_context) + ) + template_args[downgrade_ops.downgrade_token] = _indent( + _render_cmd_body(downgrade_ops, autogen_context) + ) + template_args["imports"] = "\n".join(sorted(imports)) + + +default_renderers = renderers = util.Dispatcher() + + +def _render_cmd_body(op_container, autogen_context): + + buf = StringIO() + printer = PythonPrinter(buf) + + printer.writeline( + "# ### commands auto generated by Alembic - please adjust! ###" + ) + + if not op_container.ops: + printer.writeline("pass") + else: + for op in op_container.ops: + lines = render_op(autogen_context, op) + + for line in lines: + printer.writeline(line) + + printer.writeline("# ### end Alembic commands ###") + + return buf.getvalue() + + +def render_op(autogen_context, op): + renderer = renderers.dispatch(op) + lines = util.to_list(renderer(autogen_context, op)) + return lines + + +def render_op_text(autogen_context, op): + return "\n".join(render_op(autogen_context, op)) + + +@renderers.dispatch_for(ops.ModifyTableOps) +def _render_modify_table(autogen_context, op): + opts = autogen_context.opts + render_as_batch = opts.get("render_as_batch", False) + + if op.ops: + lines = [] + if render_as_batch: + with autogen_context._within_batch(): + lines.append( + "with op.batch_alter_table(%r, schema=%r) as batch_op:" + % (op.table_name, op.schema) + ) + for t_op in op.ops: + t_lines = render_op(autogen_context, t_op) + lines.extend(t_lines) + lines.append("") + else: + for t_op in op.ops: + t_lines = render_op(autogen_context, t_op) + lines.extend(t_lines) + + return lines + else: + return ["pass"] + + +@renderers.dispatch_for(ops.CreateTableCommentOp) +def _render_create_table_comment(autogen_context, op): + + templ = ( + "{prefix}create_table_comment(\n" + "{indent}'{tname}',\n" + "{indent}{comment},\n" + "{indent}existing_comment={existing},\n" + "{indent}schema={schema}\n" + ")" + ) + return templ.format( + prefix=_alembic_autogenerate_prefix(autogen_context), + tname=op.table_name, + comment="'%s'" % op.comment if op.comment is not None else None, + existing="'%s'" % op.existing_comment + if op.existing_comment is not None + else None, + schema="'%s'" % op.schema if op.schema is not None else None, + indent=" ", + ) + + +@renderers.dispatch_for(ops.DropTableCommentOp) +def _render_drop_table_comment(autogen_context, op): + + templ = ( + "{prefix}drop_table_comment(\n" + "{indent}'{tname}',\n" + "{indent}existing_comment={existing},\n" + "{indent}schema={schema}\n" + ")" + ) + return templ.format( + 
prefix=_alembic_autogenerate_prefix(autogen_context), + tname=op.table_name, + existing="'%s'" % op.existing_comment + if op.existing_comment is not None + else None, + schema="'%s'" % op.schema if op.schema is not None else None, + indent=" ", + ) + + +@renderers.dispatch_for(ops.CreateTableOp) +def _add_table(autogen_context, op): + table = op.to_table() + + args = [ + col + for col in [ + _render_column(col, autogen_context) for col in table.columns + ] + if col + ] + sorted( + [ + rcons + for rcons in [ + _render_constraint(cons, autogen_context) + for cons in table.constraints + ] + if rcons is not None + ] + ) + + if len(args) > MAX_PYTHON_ARGS: + args = "*[" + ",\n".join(args) + "]" + else: + args = ",\n".join(args) + + text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % { + "tablename": _ident(op.table_name), + "prefix": _alembic_autogenerate_prefix(autogen_context), + "args": args, + } + if op.schema: + text += ",\nschema=%r" % _ident(op.schema) + + comment = sqla_compat._comment_attribute(table) + if comment: + text += ",\ncomment=%r" % _ident(comment) + for k in sorted(op.kw): + text += ",\n%s=%r" % (k.replace(" ", "_"), op.kw[k]) + text += "\n)" + return text + + +@renderers.dispatch_for(ops.DropTableOp) +def _drop_table(autogen_context, op): + text = "%(prefix)sdrop_table(%(tname)r" % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "tname": _ident(op.table_name), + } + if op.schema: + text += ", schema=%r" % _ident(op.schema) + text += ")" + return text + + +@renderers.dispatch_for(ops.CreateIndexOp) +def _add_index(autogen_context, op): + index = op.to_index() + + has_batch = autogen_context._has_batch + + if has_batch: + tmpl = ( + "%(prefix)screate_index(%(name)r, [%(columns)s], " + "unique=%(unique)r%(kwargs)s)" + ) + else: + tmpl = ( + "%(prefix)screate_index(%(name)r, %(table)r, [%(columns)s], " + "unique=%(unique)r%(schema)s%(kwargs)s)" + ) + + text = tmpl % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "name": _render_gen_name(autogen_context, index.name), + "table": _ident(index.table.name), + "columns": ", ".join( + _get_index_rendered_expressions(index, autogen_context) + ), + "unique": index.unique or False, + "schema": (", schema=%r" % _ident(index.table.schema)) + if index.table.schema + else "", + "kwargs": ( + ", " + + ", ".join( + [ + "%s=%s" + % (key, _render_potential_expr(val, autogen_context)) + for key, val in index.kwargs.items() + ] + ) + ) + if len(index.kwargs) + else "", + } + return text + + +@renderers.dispatch_for(ops.DropIndexOp) +def _drop_index(autogen_context, op): + has_batch = autogen_context._has_batch + + if has_batch: + tmpl = "%(prefix)sdrop_index(%(name)r)" + else: + tmpl = ( + "%(prefix)sdrop_index(%(name)r, " + "table_name=%(table_name)r%(schema)s)" + ) + + text = tmpl % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "name": _render_gen_name(autogen_context, op.index_name), + "table_name": _ident(op.table_name), + "schema": ((", schema=%r" % _ident(op.schema)) if op.schema else ""), + } + return text + + +@renderers.dispatch_for(ops.CreateUniqueConstraintOp) +def _add_unique_constraint(autogen_context, op): + return [_uq_constraint(op.to_constraint(), autogen_context, True)] + + +@renderers.dispatch_for(ops.CreateForeignKeyOp) +def _add_fk_constraint(autogen_context, op): + + args = [repr(_render_gen_name(autogen_context, op.constraint_name))] + if not autogen_context._has_batch: + args.append(repr(_ident(op.source_table))) + + args.extend( + [ + repr(_ident(op.referent_table)), + 
repr([_ident(col) for col in op.local_cols]), + repr([_ident(col) for col in op.remote_cols]), + ] + ) + + kwargs = [ + "referent_schema", + "onupdate", + "ondelete", + "initially", + "deferrable", + "use_alter", + ] + if not autogen_context._has_batch: + kwargs.insert(0, "source_schema") + + for k in kwargs: + if k in op.kw: + value = op.kw[k] + if value is not None: + args.append("%s=%r" % (k, value)) + + return "%(prefix)screate_foreign_key(%(args)s)" % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "args": ", ".join(args), + } + + +@renderers.dispatch_for(ops.CreatePrimaryKeyOp) +def _add_pk_constraint(constraint, autogen_context): + raise NotImplementedError() + + +@renderers.dispatch_for(ops.CreateCheckConstraintOp) +def _add_check_constraint(constraint, autogen_context): + raise NotImplementedError() + + +@renderers.dispatch_for(ops.DropConstraintOp) +def _drop_constraint(autogen_context, op): + + if autogen_context._has_batch: + template = "%(prefix)sdrop_constraint" "(%(name)r, type_=%(type)r)" + else: + template = ( + "%(prefix)sdrop_constraint" + "(%(name)r, '%(table_name)s'%(schema)s, type_=%(type)r)" + ) + + text = template % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "name": _render_gen_name(autogen_context, op.constraint_name), + "table_name": _ident(op.table_name), + "type": op.constraint_type, + "schema": (", schema=%r" % _ident(op.schema)) if op.schema else "", + } + return text + + +@renderers.dispatch_for(ops.AddColumnOp) +def _add_column(autogen_context, op): + + schema, tname, column = op.schema, op.table_name, op.column + if autogen_context._has_batch: + template = "%(prefix)sadd_column(%(column)s)" + else: + template = "%(prefix)sadd_column(%(tname)r, %(column)s" + if schema: + template += ", schema=%(schema)r" + template += ")" + text = template % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "tname": tname, + "column": _render_column(column, autogen_context), + "schema": schema, + } + return text + + +@renderers.dispatch_for(ops.DropColumnOp) +def _drop_column(autogen_context, op): + + schema, tname, column_name = op.schema, op.table_name, op.column_name + + if autogen_context._has_batch: + template = "%(prefix)sdrop_column(%(cname)r)" + else: + template = "%(prefix)sdrop_column(%(tname)r, %(cname)r" + if schema: + template += ", schema=%(schema)r" + template += ")" + + text = template % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "tname": _ident(tname), + "cname": _ident(column_name), + "schema": _ident(schema), + } + return text + + +@renderers.dispatch_for(ops.AlterColumnOp) +def _alter_column(autogen_context, op): + + tname = op.table_name + cname = op.column_name + server_default = op.modify_server_default + type_ = op.modify_type + nullable = op.modify_nullable + comment = op.modify_comment + autoincrement = op.kw.get("autoincrement", None) + existing_type = op.existing_type + existing_nullable = op.existing_nullable + existing_comment = op.existing_comment + existing_server_default = op.existing_server_default + schema = op.schema + + indent = " " * 11 + + if autogen_context._has_batch: + template = "%(prefix)salter_column(%(cname)r" + else: + template = "%(prefix)salter_column(%(tname)r, %(cname)r" + + text = template % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "tname": tname, + "cname": cname, + } + if existing_type is not None: + text += ",\n%sexisting_type=%s" % ( + indent, + _repr_type(existing_type, autogen_context), + ) + if server_default is not False: + 
rendered = _render_server_default(server_default, autogen_context) + text += ",\n%sserver_default=%s" % (indent, rendered) + + if type_ is not None: + text += ",\n%stype_=%s" % (indent, _repr_type(type_, autogen_context)) + if nullable is not None: + text += ",\n%snullable=%r" % (indent, nullable) + if comment is not False: + text += ",\n%scomment=%r" % (indent, comment) + if existing_comment is not None: + text += ",\n%sexisting_comment=%r" % (indent, existing_comment) + if nullable is None and existing_nullable is not None: + text += ",\n%sexisting_nullable=%r" % (indent, existing_nullable) + if autoincrement is not None: + text += ",\n%sautoincrement=%r" % (indent, autoincrement) + if server_default is False and existing_server_default: + rendered = _render_server_default( + existing_server_default, autogen_context + ) + text += ",\n%sexisting_server_default=%s" % (indent, rendered) + if schema and not autogen_context._has_batch: + text += ",\n%sschema=%r" % (indent, schema) + text += ")" + return text + + +class _f_name(object): + def __init__(self, prefix, name): + self.prefix = prefix + self.name = name + + def __repr__(self): + return "%sf(%r)" % (self.prefix, _ident(self.name)) + + +def _ident(name): + """produce a __repr__() object for a string identifier that may + use quoted_name() in SQLAlchemy 0.9 and greater. + + The issue worked around here is that quoted_name() doesn't have + very good repr() behavior by itself when unicode is involved. + + """ + if name is None: + return name + elif util.sqla_09 and isinstance(name, sql.elements.quoted_name): + if compat.py2k: + # the attempt to encode to ascii here isn't super ideal, + # however we are trying to cut down on an explosion of + # u'' literals only when py2k + SQLA 0.9, in particular + # makes unit tests testing code generation very difficult + try: + return name.encode("ascii") + except UnicodeError: + return compat.text_type(name) + else: + return compat.text_type(name) + elif isinstance(name, compat.string_types): + return name + + +def _render_potential_expr( + value, autogen_context, wrap_in_text=True, is_server_default=False +): + if isinstance(value, sql.ClauseElement): + + if wrap_in_text: + template = "%(prefix)stext(%(sql)r)" + else: + template = "%(sql)r" + + return template % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "sql": autogen_context.migration_context.impl.render_ddl_sql_expr( + value, is_server_default=is_server_default + ), + } + + else: + return repr(value) + + +def _get_index_rendered_expressions(idx, autogen_context): + return [ + repr(_ident(getattr(exp, "name", None))) + if isinstance(exp, sa_schema.Column) + else _render_potential_expr(exp, autogen_context) + for exp in idx.expressions + ] + + +def _uq_constraint(constraint, autogen_context, alter): + opts = [] + + has_batch = autogen_context._has_batch + + if constraint.deferrable: + opts.append(("deferrable", str(constraint.deferrable))) + if constraint.initially: + opts.append(("initially", str(constraint.initially))) + if not has_batch and alter and constraint.table.schema: + opts.append(("schema", _ident(constraint.table.schema))) + if not alter and constraint.name: + opts.append( + ("name", _render_gen_name(autogen_context, constraint.name)) + ) + + if alter: + args = [repr(_render_gen_name(autogen_context, constraint.name))] + if not has_batch: + args += [repr(_ident(constraint.table.name))] + args.append(repr([_ident(col.name) for col in constraint.columns])) + args.extend(["%s=%r" % (k, v) for k, v in opts]) + return 
"%(prefix)screate_unique_constraint(%(args)s)" % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "args": ", ".join(args), + } + else: + args = [repr(_ident(col.name)) for col in constraint.columns] + args.extend(["%s=%r" % (k, v) for k, v in opts]) + return "%(prefix)sUniqueConstraint(%(args)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "args": ", ".join(args), + } + + +def _user_autogenerate_prefix(autogen_context, target): + prefix = autogen_context.opts["user_module_prefix"] + if prefix is None: + return "%s." % target.__module__ + else: + return prefix + + +def _sqlalchemy_autogenerate_prefix(autogen_context): + return autogen_context.opts["sqlalchemy_module_prefix"] or "" + + +def _alembic_autogenerate_prefix(autogen_context): + if autogen_context._has_batch: + return "batch_op." + else: + return autogen_context.opts["alembic_module_prefix"] or "" + + +def _user_defined_render(type_, object_, autogen_context): + if "render_item" in autogen_context.opts: + render = autogen_context.opts["render_item"] + if render: + rendered = render(type_, object_, autogen_context) + if rendered is not False: + return rendered + return False + + +def _render_column(column, autogen_context): + rendered = _user_defined_render("column", column, autogen_context) + if rendered is not False: + return rendered + + opts = [] + if column.server_default: + rendered = _render_server_default( + column.server_default, autogen_context + ) + if rendered: + opts.append(("server_default", rendered)) + + if ( + column.autoincrement is not None + and column.autoincrement != sqla_compat.AUTOINCREMENT_DEFAULT + ): + opts.append(("autoincrement", column.autoincrement)) + + if column.nullable is not None: + opts.append(("nullable", column.nullable)) + + if column.system: + opts.append(("system", column.system)) + + comment = sqla_compat._comment_attribute(column) + if comment: + opts.append(("comment", "%r" % comment)) + + # TODO: for non-ascii colname, assign a "key" + return "%(prefix)sColumn(%(name)r, %(type)s, %(kw)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "name": _ident(column.name), + "type": _repr_type(column.type, autogen_context), + "kw": ", ".join(["%s=%s" % (kwname, val) for kwname, val in opts]), + } + + +def _render_server_default(default, autogen_context, repr_=True): + rendered = _user_defined_render("server_default", default, autogen_context) + if rendered is not False: + return rendered + + if isinstance(default, sa_schema.DefaultClause): + if isinstance(default.arg, compat.string_types): + default = default.arg + else: + return _render_potential_expr( + default.arg, autogen_context, is_server_default=True + ) + + if isinstance(default, string_types) and repr_: + default = repr(re.sub(r"^'|'$", "", default)) + + return default + + +def _repr_type(type_, autogen_context): + rendered = _user_defined_render("type", type_, autogen_context) + if rendered is not False: + return rendered + + if hasattr(autogen_context.migration_context, "impl"): + impl_rt = autogen_context.migration_context.impl.render_type( + type_, autogen_context + ) + else: + impl_rt = None + + mod = type(type_).__module__ + imports = autogen_context.imports + if mod.startswith("sqlalchemy.dialects"): + dname = re.match(r"sqlalchemy\.dialects\.(\w+)", mod).group(1) + if imports is not None: + imports.add("from sqlalchemy.dialects import %s" % dname) + if impl_rt: + return impl_rt + else: + return "%s.%r" % (dname, type_) + elif impl_rt: + return impl_rt + elif 
mod.startswith("sqlalchemy."): + if "_render_%s_type" % type_.__visit_name__ in globals(): + fn = globals()["_render_%s_type" % type_.__visit_name__] + return fn(type_, autogen_context) + else: + prefix = _sqlalchemy_autogenerate_prefix(autogen_context) + return "%s%r" % (prefix, type_) + else: + prefix = _user_autogenerate_prefix(autogen_context, type_) + return "%s%r" % (prefix, type_) + + +def _render_ARRAY_type(type_, autogen_context): + return _render_type_w_subtype( + type_, autogen_context, "item_type", r"(.+?\()" + ) + + +def _render_type_w_subtype( + type_, autogen_context, attrname, regexp, prefix=None +): + outer_repr = repr(type_) + inner_type = getattr(type_, attrname, None) + if inner_type is None: + return False + + inner_repr = repr(inner_type) + + inner_repr = re.sub(r"([\(\)])", r"\\\1", inner_repr) + sub_type = _repr_type(getattr(type_, attrname), autogen_context) + outer_type = re.sub(regexp + inner_repr, r"\1%s" % sub_type, outer_repr) + + if prefix: + return "%s%s" % (prefix, outer_type) + + mod = type(type_).__module__ + if mod.startswith("sqlalchemy.dialects"): + dname = re.match(r"sqlalchemy\.dialects\.(\w+)", mod).group(1) + return "%s.%s" % (dname, outer_type) + elif mod.startswith("sqlalchemy"): + prefix = _sqlalchemy_autogenerate_prefix(autogen_context) + return "%s%s" % (prefix, outer_type) + else: + return None + + +_constraint_renderers = util.Dispatcher() + + +def _render_constraint(constraint, autogen_context): + try: + renderer = _constraint_renderers.dispatch(constraint) + except ValueError: + util.warn("No renderer is established for object %r" % constraint) + return "[Unknown Python object %r]" % constraint + else: + return renderer(constraint, autogen_context) + + +@_constraint_renderers.dispatch_for(sa_schema.PrimaryKeyConstraint) +def _render_primary_key(constraint, autogen_context): + rendered = _user_defined_render("primary_key", constraint, autogen_context) + if rendered is not False: + return rendered + + if not constraint.columns: + return None + + opts = [] + if constraint.name: + opts.append( + ("name", repr(_render_gen_name(autogen_context, constraint.name))) + ) + return "%(prefix)sPrimaryKeyConstraint(%(args)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "args": ", ".join( + [repr(c.name) for c in constraint.columns] + + ["%s=%s" % (kwname, val) for kwname, val in opts] + ), + } + + +def _fk_colspec(fk, metadata_schema): + """Implement a 'safe' version of ForeignKey._get_colspec() that + won't fail if the remote table can't be resolved. + + """ + colspec = fk._get_colspec() + tokens = colspec.split(".") + tname, colname = tokens[-2:] + + if metadata_schema is not None and len(tokens) == 2: + table_fullname = "%s.%s" % (metadata_schema, tname) + else: + table_fullname = ".".join(tokens[0:-1]) + + if ( + not fk.link_to_name + and fk.parent is not None + and fk.parent.table is not None + ): + # try to resolve the remote table in order to adjust for column.key. + # the FK constraint needs to be rendered in terms of the column + # name. 
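+        # (the colspec token may hold the remote column's .key rather than
+        # its database name; resolving the Table here lets the constraint be
+        # rendered against the actual column name)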
+ parent_metadata = fk.parent.table.metadata + if table_fullname in parent_metadata.tables: + col = parent_metadata.tables[table_fullname].c.get(colname) + if col is not None: + colname = _ident(col.name) + + colspec = "%s.%s" % (table_fullname, colname) + + return colspec + + +def _populate_render_fk_opts(constraint, opts): + + if constraint.onupdate: + opts.append(("onupdate", repr(constraint.onupdate))) + if constraint.ondelete: + opts.append(("ondelete", repr(constraint.ondelete))) + if constraint.initially: + opts.append(("initially", repr(constraint.initially))) + if constraint.deferrable: + opts.append(("deferrable", repr(constraint.deferrable))) + if constraint.use_alter: + opts.append(("use_alter", repr(constraint.use_alter))) + + +@_constraint_renderers.dispatch_for(sa_schema.ForeignKeyConstraint) +def _render_foreign_key(constraint, autogen_context): + rendered = _user_defined_render("foreign_key", constraint, autogen_context) + if rendered is not False: + return rendered + + opts = [] + if constraint.name: + opts.append( + ("name", repr(_render_gen_name(autogen_context, constraint.name))) + ) + + _populate_render_fk_opts(constraint, opts) + + apply_metadata_schema = constraint.parent.metadata.schema + return ( + "%(prefix)sForeignKeyConstraint([%(cols)s], " + "[%(refcols)s], %(args)s)" + % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "cols": ", ".join( + "%r" % _ident(f.parent.name) for f in constraint.elements + ), + "refcols": ", ".join( + repr(_fk_colspec(f, apply_metadata_schema)) + for f in constraint.elements + ), + "args": ", ".join( + ["%s=%s" % (kwname, val) for kwname, val in opts] + ), + } + ) + + +@_constraint_renderers.dispatch_for(sa_schema.UniqueConstraint) +def _render_unique_constraint(constraint, autogen_context): + rendered = _user_defined_render("unique", constraint, autogen_context) + if rendered is not False: + return rendered + + return _uq_constraint(constraint, autogen_context, False) + + +@_constraint_renderers.dispatch_for(sa_schema.CheckConstraint) +def _render_check_constraint(constraint, autogen_context): + rendered = _user_defined_render("check", constraint, autogen_context) + if rendered is not False: + return rendered + + # detect the constraint being part of + # a parent type which is probably in the Table already. + # ideally SQLAlchemy would give us more of a first class + # way to detect this. 
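+    # (constraints attached by a column type, e.g. Boolean or Enum with a
+    # generated CHECK, carry a _create_rule whose target is the TypeEngine;
+    # those are emitted by the type itself, so rendering is skipped below)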
+ if ( + constraint._create_rule + and hasattr(constraint._create_rule, "target") + and isinstance(constraint._create_rule.target, sqltypes.TypeEngine) + ): + return None + opts = [] + if constraint.name: + opts.append( + ("name", repr(_render_gen_name(autogen_context, constraint.name))) + ) + return "%(prefix)sCheckConstraint(%(sqltext)s%(opts)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "opts": ", " + (", ".join("%s=%s" % (k, v) for k, v in opts)) + if opts + else "", + "sqltext": _render_potential_expr( + constraint.sqltext, autogen_context, wrap_in_text=False + ), + } + + +@renderers.dispatch_for(ops.ExecuteSQLOp) +def _execute_sql(autogen_context, op): + if not isinstance(op.sqltext, string_types): + raise NotImplementedError( + "Autogenerate rendering of SQL Expression language constructs " + "not supported here; please use a plain SQL string" + ) + return "op.execute(%r)" % op.sqltext + + +renderers = default_renderers.branch() diff --git a/py3/lib/python3.6/site-packages/alembic/autogenerate/rewriter.py b/py3/lib/python3.6/site-packages/alembic/autogenerate/rewriter.py new file mode 100644 index 0000000..1e9522b --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/autogenerate/rewriter.py @@ -0,0 +1,154 @@ +from alembic import util +from alembic.operations import ops + + +class Rewriter(object): + """A helper object that allows easy 'rewriting' of ops streams. + + The :class:`.Rewriter` object is intended to be passed along + to the + :paramref:`.EnvironmentContext.configure.process_revision_directives` + parameter in an ``env.py`` script. Once constructed, any number + of "rewrites" functions can be associated with it, which will be given + the opportunity to modify the structure without having to have explicit + knowledge of the overall structure. + + The function is passed the :class:`.MigrationContext` object and + ``revision`` tuple that are passed to the :paramref:`.Environment + Context.configure.process_revision_directives` function normally, + and the third argument is an individual directive of the type + noted in the decorator. The function has the choice of returning + a single op directive, which normally can be the directive that + was actually passed, or a new directive to replace it, or a list + of zero or more directives to replace it. + + .. seealso:: + + :ref:`autogen_rewriter` - usage example + + .. versionadded:: 0.8 + + """ + + _traverse = util.Dispatcher() + + _chained = None + + def __init__(self): + self.dispatch = util.Dispatcher() + + def chain(self, other): + """Produce a "chain" of this :class:`.Rewriter` to another. + + This allows two rewriters to operate serially on a stream, + e.g.:: + + writer1 = autogenerate.Rewriter() + writer2 = autogenerate.Rewriter() + + @writer1.rewrites(ops.AddColumnOp) + def add_column_nullable(context, revision, op): + op.column.nullable = True + return op + + @writer2.rewrites(ops.AddColumnOp) + def add_column_idx(context, revision, op): + idx_op = ops.CreateIndexOp( + 'ixc', op.table_name, [op.column.name]) + return [ + op, + idx_op + ] + + writer = writer1.chain(writer2) + + :param other: a :class:`.Rewriter` instance + :return: a new :class:`.Rewriter` that will run the operations + of this writer, then the "other" writer, in succession. + + """ + wr = self.__class__.__new__(self.__class__) + wr.__dict__.update(self.__dict__) + wr._chained = other + return wr + + def rewrites(self, operator): + """Register a function as rewriter for a given type. 
+ + The function should receive three arguments, which are + the :class:`.MigrationContext`, a ``revision`` tuple, and + an op directive of the type indicated. E.g.:: + + @writer1.rewrites(ops.AddColumnOp) + def add_column_nullable(context, revision, op): + op.column.nullable = True + return op + + """ + return self.dispatch.dispatch_for(operator) + + def _rewrite(self, context, revision, directive): + try: + _rewriter = self.dispatch.dispatch(directive) + except ValueError: + _rewriter = None + yield directive + else: + for r_directive in util.to_list( + _rewriter(context, revision, directive) + ): + yield r_directive + + def __call__(self, context, revision, directives): + self.process_revision_directives(context, revision, directives) + if self._chained: + self._chained(context, revision, directives) + + @_traverse.dispatch_for(ops.MigrationScript) + def _traverse_script(self, context, revision, directive): + upgrade_ops_list = [] + for upgrade_ops in directive.upgrade_ops_list: + ret = self._traverse_for(context, revision, directive.upgrade_ops) + if len(ret) != 1: + raise ValueError( + "Can only return single object for UpgradeOps traverse" + ) + upgrade_ops_list.append(ret[0]) + directive.upgrade_ops = upgrade_ops_list + + downgrade_ops_list = [] + for downgrade_ops in directive.downgrade_ops_list: + ret = self._traverse_for( + context, revision, directive.downgrade_ops + ) + if len(ret) != 1: + raise ValueError( + "Can only return single object for DowngradeOps traverse" + ) + downgrade_ops_list.append(ret[0]) + directive.downgrade_ops = downgrade_ops_list + + @_traverse.dispatch_for(ops.OpContainer) + def _traverse_op_container(self, context, revision, directive): + self._traverse_list(context, revision, directive.ops) + + @_traverse.dispatch_for(ops.MigrateOperation) + def _traverse_any_directive(self, context, revision, directive): + pass + + def _traverse_for(self, context, revision, directive): + directives = list(self._rewrite(context, revision, directive)) + for directive in directives: + traverser = self._traverse.dispatch(directive) + traverser(self, context, revision, directive) + return directives + + def _traverse_list(self, context, revision, directives): + dest = [] + for directive in directives: + dest.extend(self._traverse_for(context, revision, directive)) + + directives[:] = dest + + def process_revision_directives(self, context, revision, directives): + self._traverse_list(context, revision, directives) diff --git a/py3/lib/python3.6/site-packages/alembic/command.py b/py3/lib/python3.6/site-packages/alembic/command.py new file mode 100644 index 0000000..3faafdd --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/command.py @@ -0,0 +1,564 @@ +import os + +from . import autogenerate as autogen +from . import util +from .runtime.environment import EnvironmentContext +from .script import ScriptDirectory + + +def list_templates(config): + """List available templates. + + :param config: a :class:`.Config` object. + + """ + + config.print_stdout("Available templates:\n") + for tempname in os.listdir(config.get_template_directory()): + with open( + os.path.join(config.get_template_directory(), tempname, "README") + ) as readme: + synopsis = next(readme) + config.print_stdout("%s - %s", tempname, synopsis) + + config.print_stdout("\nTemplates are used via the 'init' command, e.g.:") + config.print_stdout("\n alembic init --template generic ./scripts") + + +def init(config, directory, template="generic"): + """Initialize a new scripts directory. 
+ + :param config: a :class:`.Config` object. + + :param directory: string path of the target directory + + :param template: string name of the migration environment template to + use. + + """ + + if os.access(directory, os.F_OK): + raise util.CommandError("Directory %s already exists" % directory) + + template_dir = os.path.join(config.get_template_directory(), template) + if not os.access(template_dir, os.F_OK): + raise util.CommandError("No such template %r" % template) + + util.status( + "Creating directory %s" % os.path.abspath(directory), + os.makedirs, + directory, + ) + + versions = os.path.join(directory, "versions") + util.status( + "Creating directory %s" % os.path.abspath(versions), + os.makedirs, + versions, + ) + + script = ScriptDirectory(directory) + + for file_ in os.listdir(template_dir): + file_path = os.path.join(template_dir, file_) + if file_ == "alembic.ini.mako": + config_file = os.path.abspath(config.config_file_name) + if os.access(config_file, os.F_OK): + util.msg("File %s already exists, skipping" % config_file) + else: + script._generate_template( + file_path, config_file, script_location=directory + ) + elif os.path.isfile(file_path): + output_file = os.path.join(directory, file_) + script._copy_file(file_path, output_file) + + util.msg( + "Please edit configuration/connection/logging " + "settings in %r before proceeding." % config_file + ) + + +def revision( + config, + message=None, + autogenerate=False, + sql=False, + head="head", + splice=False, + branch_label=None, + version_path=None, + rev_id=None, + depends_on=None, + process_revision_directives=None, +): + """Create a new revision file. + + :param config: a :class:`.Config` object. + + :param message: string message to apply to the revision; this is the + ``-m`` option to ``alembic revision``. + + :param autogenerate: whether or not to autogenerate the script from + the database; this is the ``--autogenerate`` option to + ``alembic revision``. + + :param sql: whether to dump the script out as a SQL string; when specified, + the script is dumped to stdout. This is the ``--sql`` option to + ``alembic revision``. + + :param head: head revision to build the new revision upon as a parent; + this is the ``--head`` option to ``alembic revision``. + + :param splice: whether or not the new revision should be made into a + new head of its own; is required when the given ``head`` is not itself + a head. This is the ``--splice`` option to ``alembic revision``. + + :param branch_label: string label to apply to the branch; this is the + ``--branch-label`` option to ``alembic revision``. + + :param version_path: string symbol identifying a specific version path + from the configuration; this is the ``--version-path`` option to + ``alembic revision``. + + :param rev_id: optional revision identifier to use instead of having + one generated; this is the ``--rev-id`` option to ``alembic revision``. + + :param depends_on: optional list of "depends on" identifiers; this is the + ``--depends-on`` option to ``alembic revision``. + + :param process_revision_directives: this is a callable that takes the + same form as the callable described at + :paramref:`.EnvironmentContext.configure.process_revision_directives`; + will be applied to the structure generated by the revision process + where it can be altered programmatically. Note that unlike all + the other parameters, this option is only available via programmatic + use of :func:`.command.revision` + + .. 
versionadded:: 0.9.0 + + """ + + script_directory = ScriptDirectory.from_config(config) + + command_args = dict( + message=message, + autogenerate=autogenerate, + sql=sql, + head=head, + splice=splice, + branch_label=branch_label, + version_path=version_path, + rev_id=rev_id, + depends_on=depends_on, + ) + revision_context = autogen.RevisionContext( + config, + script_directory, + command_args, + process_revision_directives=process_revision_directives, + ) + + environment = util.asbool(config.get_main_option("revision_environment")) + + if autogenerate: + environment = True + + if sql: + raise util.CommandError( + "Using --sql with --autogenerate does not make any sense" + ) + + def retrieve_migrations(rev, context): + revision_context.run_autogenerate(rev, context) + return [] + + elif environment: + + def retrieve_migrations(rev, context): + revision_context.run_no_autogenerate(rev, context) + return [] + + elif sql: + raise util.CommandError( + "Using --sql with the revision command when " + "revision_environment is not configured does not make any sense" + ) + + if environment: + with EnvironmentContext( + config, + script_directory, + fn=retrieve_migrations, + as_sql=sql, + template_args=revision_context.template_args, + revision_context=revision_context, + ): + script_directory.run_env() + + scripts = [script for script in revision_context.generate_scripts()] + if len(scripts) == 1: + return scripts[0] + else: + return scripts + + +def merge(config, revisions, message=None, branch_label=None, rev_id=None): + """Merge two revisions together. Creates a new migration file. + + .. versionadded:: 0.7.0 + + :param config: a :class:`.Config` instance + + :param message: string message to apply to the revision + + :param branch_label: string label name to apply to the new revision + + :param rev_id: hardcoded revision identifier instead of generating a new + one. + + .. seealso:: + + :ref:`branches` + + """ + + script = ScriptDirectory.from_config(config) + template_args = { + "config": config # Let templates use config for + # e.g. multiple databases + } + return script.generate_revision( + rev_id or util.rev_id(), + message, + refresh=True, + head=revisions, + branch_labels=branch_label, + **template_args + ) + + +def upgrade(config, revision, sql=False, tag=None): + """Upgrade to a later version. + + :param config: a :class:`.Config` instance. + + :param revision: string revision target or range for --sql mode + + :param sql: if True, use ``--sql`` mode + + :param tag: an arbitrary "tag" that can be intercepted by custom + ``env.py`` scripts via the :meth:`.EnvironmentContext.get_tag_argument` + method. + + """ + + script = ScriptDirectory.from_config(config) + + starting_rev = None + if ":" in revision: + if not sql: + raise util.CommandError("Range revision not allowed") + starting_rev, revision = revision.split(":", 2) + + def upgrade(rev, context): + return script._upgrade_revs(revision, rev) + + with EnvironmentContext( + config, + script, + fn=upgrade, + as_sql=sql, + starting_rev=starting_rev, + destination_rev=revision, + tag=tag, + ): + script.run_env() + + +def downgrade(config, revision, sql=False, tag=None): + """Revert to a previous version. + + :param config: a :class:`.Config` instance. + + :param revision: string revision target or range for --sql mode + + :param sql: if True, use ``--sql`` mode + + :param tag: an arbitrary "tag" that can be intercepted by custom + ``env.py`` scripts via the :meth:`.EnvironmentContext.get_tag_argument` + method. 
+ + """ + + script = ScriptDirectory.from_config(config) + starting_rev = None + if ":" in revision: + if not sql: + raise util.CommandError("Range revision not allowed") + starting_rev, revision = revision.split(":", 2) + elif sql: + raise util.CommandError( + "downgrade with --sql requires :" + ) + + def downgrade(rev, context): + return script._downgrade_revs(revision, rev) + + with EnvironmentContext( + config, + script, + fn=downgrade, + as_sql=sql, + starting_rev=starting_rev, + destination_rev=revision, + tag=tag, + ): + script.run_env() + + +def show(config, rev): + """Show the revision(s) denoted by the given symbol. + + :param config: a :class:`.Config` instance. + + :param revision: string revision target + + """ + + script = ScriptDirectory.from_config(config) + + if rev == "current": + + def show_current(rev, context): + for sc in script.get_revisions(rev): + config.print_stdout(sc.log_entry) + return [] + + with EnvironmentContext(config, script, fn=show_current): + script.run_env() + else: + for sc in script.get_revisions(rev): + config.print_stdout(sc.log_entry) + + +def history(config, rev_range=None, verbose=False, indicate_current=False): + """List changeset scripts in chronological order. + + :param config: a :class:`.Config` instance. + + :param rev_range: string revision range + + :param verbose: output in verbose mode. + + :param indicate_current: indicate current revision. + + ..versionadded:: 0.9.9 + + """ + + script = ScriptDirectory.from_config(config) + if rev_range is not None: + if ":" not in rev_range: + raise util.CommandError( + "History range requires [start]:[end], " "[start]:, or :[end]" + ) + base, head = rev_range.strip().split(":") + else: + base = head = None + + environment = ( + util.asbool(config.get_main_option("revision_environment")) + or indicate_current + ) + + def _display_history(config, script, base, head, currents=()): + for sc in script.walk_revisions( + base=base or "base", head=head or "heads" + ): + + if indicate_current: + sc._db_current_indicator = sc.revision in currents + + config.print_stdout( + sc.cmd_format( + verbose=verbose, + include_branches=True, + include_doc=True, + include_parents=True, + ) + ) + + def _display_history_w_current(config, script, base, head): + def _display_current_history(rev, context): + if head == "current": + _display_history(config, script, base, rev, rev) + elif base == "current": + _display_history(config, script, rev, head, rev) + else: + _display_history(config, script, base, head, rev) + return [] + + with EnvironmentContext(config, script, fn=_display_current_history): + script.run_env() + + if base == "current" or head == "current" or environment: + _display_history_w_current(config, script, base, head) + else: + _display_history(config, script, base, head) + + +def heads(config, verbose=False, resolve_dependencies=False): + """Show current available heads in the script directory. + + :param config: a :class:`.Config` instance. + + :param verbose: output in verbose mode. + + :param resolve_dependencies: treat dependency version as down revisions. + + """ + + script = ScriptDirectory.from_config(config) + if resolve_dependencies: + heads = script.get_revisions("heads") + else: + heads = script.get_revisions(script.get_heads()) + + for rev in heads: + config.print_stdout( + rev.cmd_format( + verbose, include_branches=True, tree_indicators=False + ) + ) + + +def branches(config, verbose=False): + """Show current branch points. + + :param config: a :class:`.Config` instance. 
+ + :param verbose: output in verbose mode. + + """ + script = ScriptDirectory.from_config(config) + for sc in script.walk_revisions(): + if sc.is_branch_point: + config.print_stdout( + "%s\n%s\n", + sc.cmd_format(verbose, include_branches=True), + "\n".join( + "%s -> %s" + % ( + " " * len(str(sc.revision)), + rev_obj.cmd_format( + False, include_branches=True, include_doc=verbose + ), + ) + for rev_obj in ( + script.get_revision(rev) for rev in sc.nextrev + ) + ), + ) + + +def current(config, verbose=False, head_only=False): + """Display the current revision for a database. + + :param config: a :class:`.Config` instance. + + :param verbose: output in verbose mode. + + :param head_only: deprecated; use ``verbose`` for additional output. + + """ + + script = ScriptDirectory.from_config(config) + + if head_only: + util.warn("--head-only is deprecated", stacklevel=3) + + def display_version(rev, context): + if verbose: + config.print_stdout( + "Current revision(s) for %s:", + util.obfuscate_url_pw(context.connection.engine.url), + ) + for rev in script.get_all_current(rev): + config.print_stdout(rev.cmd_format(verbose)) + + return [] + + with EnvironmentContext(config, script, fn=display_version): + script.run_env() + + +def stamp(config, revision, sql=False, tag=None): + """'stamp' the revision table with the given revision; don't + run any migrations. + + :param config: a :class:`.Config` instance. + + :param revision: target revision. + + :param sql: use ``--sql`` mode + + :param tag: an arbitrary "tag" that can be intercepted by custom + ``env.py`` scripts via the :class:`.EnvironmentContext.get_tag_argument` + method. + + """ + + script = ScriptDirectory.from_config(config) + + starting_rev = None + if ":" in revision: + if not sql: + raise util.CommandError("Range revision not allowed") + starting_rev, revision = revision.split(":", 2) + + def do_stamp(rev, context): + return script._stamp_revs(revision, rev) + + with EnvironmentContext( + config, + script, + fn=do_stamp, + as_sql=sql, + destination_rev=revision, + starting_rev=starting_rev, + tag=tag, + ): + script.run_env() + + +def edit(config, rev): + """Edit revision script(s) using $EDITOR. + + :param config: a :class:`.Config` instance. + + :param rev: target revision. + + """ + + script = ScriptDirectory.from_config(config) + + if rev == "current": + + def edit_current(rev, context): + if not rev: + raise util.CommandError("No current revisions") + for sc in script.get_revisions(rev): + util.edit(sc.path) + return [] + + with EnvironmentContext(config, script, fn=edit_current): + script.run_env() + else: + revs = script.get_revisions(rev) + if not revs: + raise util.CommandError( + "No revision files indicated by symbol '%s'" % rev + ) + for sc in revs: + util.edit(sc.path) diff --git a/py3/lib/python3.6/site-packages/alembic/config.py b/py3/lib/python3.6/site-packages/alembic/config.py new file mode 100644 index 0000000..745ca8b --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/config.py @@ -0,0 +1,544 @@ +from argparse import ArgumentParser +import inspect +import os +import sys + +from . import command +from . import package_dir +from . import util +from .util import compat +from .util.compat import SafeConfigParser + + +class Config(object): + + r"""Represent an Alembic configuration. 
+ + Within an ``env.py`` script, this is available + via the :attr:`.EnvironmentContext.config` attribute, + which in turn is available at ``alembic.context``:: + + from alembic import context + + some_param = context.config.get_main_option("my option") + + When invoking Alembic programatically, a new + :class:`.Config` can be created by passing + the name of an .ini file to the constructor:: + + from alembic.config import Config + alembic_cfg = Config("/path/to/yourapp/alembic.ini") + + With a :class:`.Config` object, you can then + run Alembic commands programmatically using the directives + in :mod:`alembic.command`. + + The :class:`.Config` object can also be constructed without + a filename. Values can be set programmatically, and + new sections will be created as needed:: + + from alembic.config import Config + alembic_cfg = Config() + alembic_cfg.set_main_option("script_location", "myapp:migrations") + alembic_cfg.set_main_option("sqlalchemy.url", "postgresql://foo/bar") + alembic_cfg.set_section_option("mysection", "foo", "bar") + + .. warning:: + + When using programmatic configuration, make sure the + ``env.py`` file in use is compatible with the target configuration; + including that the call to Python ``logging.fileConfig()`` is + omitted if the programmatic configuration doesn't actually include + logging directives. + + For passing non-string values to environments, such as connections and + engines, use the :attr:`.Config.attributes` dictionary:: + + with engine.begin() as connection: + alembic_cfg.attributes['connection'] = connection + command.upgrade(alembic_cfg, "head") + + :param file\_: name of the .ini file to open. + :param ini_section: name of the main Alembic section within the + .ini file + :param output_buffer: optional file-like input buffer which + will be passed to the :class:`.MigrationContext` - used to redirect + the output of "offline generation" when using Alembic programmatically. + :param stdout: buffer where the "print" output of commands will be sent. + Defaults to ``sys.stdout``. + + .. versionadded:: 0.4 + + :param config_args: A dictionary of keys and values that will be used + for substitution in the alembic config file. The dictionary as given + is **copied** to a new one, stored locally as the attribute + ``.config_args``. When the :attr:`.Config.file_config` attribute is + first invoked, the replacement variable ``here`` will be added to this + dictionary before the dictionary is passed to ``SafeConfigParser()`` + to parse the .ini file. + + .. versionadded:: 0.7.0 + + :param attributes: optional dictionary of arbitrary Python keys/values, + which will be populated into the :attr:`.Config.attributes` dictionary. + + .. versionadded:: 0.7.5 + + .. seealso:: + + :ref:`connection_sharing` + + """ + + def __init__( + self, + file_=None, + ini_section="alembic", + output_buffer=None, + stdout=sys.stdout, + cmd_opts=None, + config_args=util.immutabledict(), + attributes=None, + ): + """Construct a new :class:`.Config` + + """ + self.config_file_name = file_ + self.config_ini_section = ini_section + self.output_buffer = output_buffer + self.stdout = stdout + self.cmd_opts = cmd_opts + self.config_args = dict(config_args) + if attributes: + self.attributes.update(attributes) + + cmd_opts = None + """The command-line options passed to the ``alembic`` script. + + Within an ``env.py`` script this can be accessed via the + :attr:`.EnvironmentContext.config` attribute. + + .. versionadded:: 0.6.0 + + .. 
seealso:: + + :meth:`.EnvironmentContext.get_x_argument` + + """ + + config_file_name = None + """Filesystem path to the .ini file in use.""" + + config_ini_section = None + """Name of the config file section to read basic configuration + from. Defaults to ``alembic``, that is the ``[alembic]`` section + of the .ini file. This value is modified using the ``-n/--name`` + option to the Alembic runnier. + + """ + + @util.memoized_property + def attributes(self): + """A Python dictionary for storage of additional state. + + + This is a utility dictionary which can include not just strings but + engines, connections, schema objects, or anything else. + Use this to pass objects into an env.py script, such as passing + a :class:`sqlalchemy.engine.base.Connection` when calling + commands from :mod:`alembic.command` programmatically. + + .. versionadded:: 0.7.5 + + .. seealso:: + + :ref:`connection_sharing` + + :paramref:`.Config.attributes` + + """ + return {} + + def print_stdout(self, text, *arg): + """Render a message to standard out. + + When :meth:`.Config.print_stdout` is called with additional args + those arguments will formatted against the provided text, + otherwise we simply output the provided text verbatim. + + e.g.:: + + >>> config.print_stdout('Some text %s', 'arg') + Some Text arg + + """ + + if arg: + output = compat.text_type(text) % arg + else: + output = compat.text_type(text) + + util.write_outstream(self.stdout, output, "\n") + + @util.memoized_property + def file_config(self): + """Return the underlying ``ConfigParser`` object. + + Direct access to the .ini file is available here, + though the :meth:`.Config.get_section` and + :meth:`.Config.get_main_option` + methods provide a possibly simpler interface. + + """ + + if self.config_file_name: + here = os.path.abspath(os.path.dirname(self.config_file_name)) + else: + here = "" + self.config_args["here"] = here + file_config = SafeConfigParser(self.config_args) + if self.config_file_name: + file_config.read([self.config_file_name]) + else: + file_config.add_section(self.config_ini_section) + return file_config + + def get_template_directory(self): + """Return the directory where Alembic setup templates are found. + + This method is used by the alembic ``init`` and ``list_templates`` + commands. + + """ + return os.path.join(package_dir, "templates") + + def get_section(self, name): + """Return all the configuration options from a given .ini file section + as a dictionary. + + """ + return dict(self.file_config.items(name)) + + def set_main_option(self, name, value): + """Set an option programmatically within the 'main' section. + + This overrides whatever was in the .ini file. + + :param name: name of the value + + :param value: the value. Note that this value is passed to + ``ConfigParser.set``, which supports variable interpolation using + pyformat (e.g. ``%(some_value)s``). A raw percent sign not part of + an interpolation symbol must therefore be escaped, e.g. ``%%``. + The given value may refer to another value already in the file + using the interpolation format. + + """ + self.set_section_option(self.config_ini_section, name, value) + + def remove_main_option(self, name): + self.file_config.remove_option(self.config_ini_section, name) + + def set_section_option(self, section, name, value): + """Set an option programmatically within the given section. + + The section is created if it doesn't exist already. + The value here will override whatever was in the .ini + file. 
+ + :param section: name of the section + + :param name: name of the value + + :param value: the value. Note that this value is passed to + ``ConfigParser.set``, which supports variable interpolation using + pyformat (e.g. ``%(some_value)s``). A raw percent sign not part of + an interpolation symbol must therefore be escaped, e.g. ``%%``. + The given value may refer to another value already in the file + using the interpolation format. + + """ + + if not self.file_config.has_section(section): + self.file_config.add_section(section) + self.file_config.set(section, name, value) + + def get_section_option(self, section, name, default=None): + """Return an option from the given section of the .ini file. + + """ + if not self.file_config.has_section(section): + raise util.CommandError( + "No config file %r found, or file has no " + "'[%s]' section" % (self.config_file_name, section) + ) + if self.file_config.has_option(section, name): + return self.file_config.get(section, name) + else: + return default + + def get_main_option(self, name, default=None): + """Return an option from the 'main' section of the .ini file. + + This defaults to being a key from the ``[alembic]`` + section, unless the ``-n/--name`` flag were used to + indicate a different section. + + """ + return self.get_section_option(self.config_ini_section, name, default) + + +class CommandLine(object): + def __init__(self, prog=None): + self._generate_args(prog) + + def _generate_args(self, prog): + def add_options(parser, positional, kwargs): + kwargs_opts = { + "template": ( + "-t", + "--template", + dict( + default="generic", + type=str, + help="Setup template for use with 'init'", + ), + ), + "message": ( + "-m", + "--message", + dict( + type=str, help="Message string to use with 'revision'" + ), + ), + "sql": ( + "--sql", + dict( + action="store_true", + help="Don't emit SQL to database - dump to " + "standard output/file instead. See docs on " + "offline mode.", + ), + ), + "tag": ( + "--tag", + dict( + type=str, + help="Arbitrary 'tag' name - can be used by " + "custom env.py scripts.", + ), + ), + "head": ( + "--head", + dict( + type=str, + help="Specify head revision or @head " + "to base new revision on.", + ), + ), + "splice": ( + "--splice", + dict( + action="store_true", + help="Allow a non-head revision as the " + "'head' to splice onto", + ), + ), + "depends_on": ( + "--depends-on", + dict( + action="append", + help="Specify one or more revision identifiers " + "which this revision should depend on.", + ), + ), + "rev_id": ( + "--rev-id", + dict( + type=str, + help="Specify a hardcoded revision id instead of " + "generating one", + ), + ), + "version_path": ( + "--version-path", + dict( + type=str, + help="Specify specific path from config for " + "version file", + ), + ), + "branch_label": ( + "--branch-label", + dict( + type=str, + help="Specify a branch label to apply to the " + "new revision", + ), + ), + "verbose": ( + "-v", + "--verbose", + dict(action="store_true", help="Use more verbose output"), + ), + "resolve_dependencies": ( + "--resolve-dependencies", + dict( + action="store_true", + help="Treat dependency versions as down revisions", + ), + ), + "autogenerate": ( + "--autogenerate", + dict( + action="store_true", + help="Populate revision script with candidate " + "migration operations, based on comparison " + "of database to model.", + ), + ), + "head_only": ( + "--head-only", + dict( + action="store_true", + help="Deprecated. 
Use --verbose for " + "additional output", + ), + ), + "rev_range": ( + "-r", + "--rev-range", + dict( + action="store", + help="Specify a revision range; " + "format is [start]:[end]", + ), + ), + "indicate_current": ( + "-i", + "--indicate-current", + dict( + action="store_true", + help="Indicate the current revision", + ), + ), + } + positional_help = { + "directory": "location of scripts directory", + "revision": "revision identifier", + "revisions": "one or more revisions, or 'heads' for all heads", + } + for arg in kwargs: + if arg in kwargs_opts: + args = kwargs_opts[arg] + args, kw = args[0:-1], args[-1] + parser.add_argument(*args, **kw) + + for arg in positional: + if arg == "revisions": + subparser.add_argument( + arg, nargs="+", help=positional_help.get(arg) + ) + else: + subparser.add_argument(arg, help=positional_help.get(arg)) + + parser = ArgumentParser(prog=prog) + parser.add_argument( + "-c", + "--config", + type=str, + default="alembic.ini", + help="Alternate config file", + ) + parser.add_argument( + "-n", + "--name", + type=str, + default="alembic", + help="Name of section in .ini file to " "use for Alembic config", + ) + parser.add_argument( + "-x", + action="append", + help="Additional arguments consumed by " + "custom env.py scripts, e.g. -x " + "setting1=somesetting -x setting2=somesetting", + ) + parser.add_argument( + "--raiseerr", + action="store_true", + help="Raise a full stack trace on error", + ) + subparsers = parser.add_subparsers() + + for fn in [getattr(command, n) for n in dir(command)]: + if ( + inspect.isfunction(fn) + and fn.__name__[0] != "_" + and fn.__module__ == "alembic.command" + ): + + spec = compat.inspect_getargspec(fn) + if spec[3]: + positional = spec[0][1 : -len(spec[3])] + kwarg = spec[0][-len(spec[3]) :] + else: + positional = spec[0][1:] + kwarg = [] + + # parse first line(s) of helptext without a line break + help_ = fn.__doc__ + if help_: + help_text = [] + for line in help_.split("\n"): + if not line.strip(): + break + else: + help_text.append(line.strip()) + else: + help_text = "" + subparser = subparsers.add_parser( + fn.__name__, help=" ".join(help_text) + ) + add_options(subparser, positional, kwarg) + subparser.set_defaults(cmd=(fn, positional, kwarg)) + self.parser = parser + + def run_cmd(self, config, options): + fn, positional, kwarg = options.cmd + + try: + fn( + config, + *[getattr(options, k, None) for k in positional], + **dict((k, getattr(options, k, None)) for k in kwarg) + ) + except util.CommandError as e: + if options.raiseerr: + raise + else: + util.err(str(e)) + + def main(self, argv=None): + options = self.parser.parse_args(argv) + if not hasattr(options, "cmd"): + # see http://bugs.python.org/issue9253, argparse + # behavior changed incompatibly in py3.3 + self.parser.error("too few arguments") + else: + cfg = Config( + file_=options.config, + ini_section=options.name, + cmd_opts=options, + ) + self.run_cmd(cfg, options) + + +def main(argv=None, prog=None, **kwargs): + """The console runner function for Alembic.""" + + CommandLine(prog=prog).main(argv=argv) + + +if __name__ == "__main__": + main() diff --git a/py3/lib/python3.6/site-packages/alembic/context.py b/py3/lib/python3.6/site-packages/alembic/context.py new file mode 100644 index 0000000..758fca8 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/context.py @@ -0,0 +1,5 @@ +from .runtime.environment import EnvironmentContext + +# create proxy functions for +# each method on the EnvironmentContext class. 
+EnvironmentContext.create_module_class_proxy(globals(), locals()) diff --git a/py3/lib/python3.6/site-packages/alembic/ddl/__init__.py b/py3/lib/python3.6/site-packages/alembic/ddl/__init__.py new file mode 100644 index 0000000..7d50ba0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/ddl/__init__.py @@ -0,0 +1,6 @@ +from . import mssql # noqa +from . import mysql # noqa +from . import oracle # noqa +from . import postgresql # noqa +from . import sqlite # noqa +from .impl import DefaultImpl # noqa diff --git a/py3/lib/python3.6/site-packages/alembic/ddl/base.py b/py3/lib/python3.6/site-packages/alembic/ddl/base.py new file mode 100644 index 0000000..90573cf --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/ddl/base.py @@ -0,0 +1,213 @@ +import functools + +from sqlalchemy import Integer +from sqlalchemy import types as sqltypes +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.schema import Column +from sqlalchemy.schema import DDLElement + +from .. import util +from ..util.sqla_compat import _columns_for_constraint # noqa +from ..util.sqla_compat import _find_columns # noqa +from ..util.sqla_compat import _fk_spec # noqa +from ..util.sqla_compat import _is_type_bound # noqa +from ..util.sqla_compat import _table_for_constraint # noqa + +# backwards compat + +if util.sqla_09: + from sqlalchemy.sql.elements import quoted_name + + +class AlterTable(DDLElement): + + """Represent an ALTER TABLE statement. + + Only the string name and optional schema name of the table + is required, not a full Table object. + + """ + + def __init__(self, table_name, schema=None): + self.table_name = table_name + self.schema = schema + + +class RenameTable(AlterTable): + def __init__(self, old_table_name, new_table_name, schema=None): + super(RenameTable, self).__init__(old_table_name, schema=schema) + self.new_table_name = new_table_name + + +class AlterColumn(AlterTable): + def __init__( + self, + name, + column_name, + schema=None, + existing_type=None, + existing_nullable=None, + existing_server_default=None, + existing_comment=None, + ): + super(AlterColumn, self).__init__(name, schema=schema) + self.column_name = column_name + self.existing_type = ( + sqltypes.to_instance(existing_type) + if existing_type is not None + else None + ) + self.existing_nullable = existing_nullable + self.existing_server_default = existing_server_default + self.existing_comment = existing_comment + + +class ColumnNullable(AlterColumn): + def __init__(self, name, column_name, nullable, **kw): + super(ColumnNullable, self).__init__(name, column_name, **kw) + self.nullable = nullable + + +class ColumnType(AlterColumn): + def __init__(self, name, column_name, type_, **kw): + super(ColumnType, self).__init__(name, column_name, **kw) + self.type_ = sqltypes.to_instance(type_) + + +class ColumnName(AlterColumn): + def __init__(self, name, column_name, newname, **kw): + super(ColumnName, self).__init__(name, column_name, **kw) + self.newname = newname + + +class ColumnDefault(AlterColumn): + def __init__(self, name, column_name, default, **kw): + super(ColumnDefault, self).__init__(name, column_name, **kw) + self.default = default + + +class AddColumn(AlterTable): + def __init__(self, name, column, schema=None): + super(AddColumn, self).__init__(name, schema=schema) + self.column = column + + +class DropColumn(AlterTable): + def __init__(self, name, column, schema=None): + super(DropColumn, self).__init__(name, schema=schema) + self.column = column + + +class ColumnComment(AlterColumn): + def 
__init__(self, name, column_name, comment, **kw): + super(ColumnComment, self).__init__(name, column_name, **kw) + self.comment = comment + + +@compiles(RenameTable) +def visit_rename_table(element, compiler, **kw): + return "%s RENAME TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_table_name(compiler, element.new_table_name, element.schema), + ) + + +@compiles(AddColumn) +def visit_add_column(element, compiler, **kw): + return "%s %s" % ( + alter_table(compiler, element.table_name, element.schema), + add_column(compiler, element.column, **kw), + ) + + +@compiles(DropColumn) +def visit_drop_column(element, compiler, **kw): + return "%s %s" % ( + alter_table(compiler, element.table_name, element.schema), + drop_column(compiler, element.column.name, **kw), + ) + + +@compiles(ColumnNullable) +def visit_column_nullable(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "DROP NOT NULL" if element.nullable else "SET NOT NULL", + ) + + +@compiles(ColumnType) +def visit_column_type(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "TYPE %s" % format_type(compiler, element.type_), + ) + + +@compiles(ColumnName) +def visit_column_name(element, compiler, **kw): + return "%s RENAME %s TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname), + ) + + +@compiles(ColumnDefault) +def visit_column_default(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "SET DEFAULT %s" % format_server_default(compiler, element.default) + if element.default is not None + else "DROP DEFAULT", + ) + + +def quote_dotted(name, quote): + """quote the elements of a dotted name""" + + if util.sqla_09 and isinstance(name, quoted_name): + return quote(name) + result = ".".join([quote(x) for x in name.split(".")]) + return result + + +def format_table_name(compiler, name, schema): + quote = functools.partial(compiler.preparer.quote) + if schema: + return quote_dotted(schema, quote) + "." + quote(name) + else: + return quote(name) + + +def format_column_name(compiler, name): + return compiler.preparer.quote(name) + + +def format_server_default(compiler, default): + return compiler.get_column_default_string( + Column("x", Integer, server_default=default) + ) + + +def format_type(compiler, type_): + return compiler.dialect.type_compiler.process(type_) + + +def alter_table(compiler, name, schema): + return "ALTER TABLE %s" % format_table_name(compiler, name, schema) + + +def drop_column(compiler, name): + return "DROP COLUMN %s" % format_column_name(compiler, name) + + +def alter_column(compiler, name): + return "ALTER COLUMN %s" % format_column_name(compiler, name) + + +def add_column(compiler, column, **kw): + return "ADD COLUMN %s" % compiler.get_column_specification(column, **kw) diff --git a/py3/lib/python3.6/site-packages/alembic/ddl/impl.py b/py3/lib/python3.6/site-packages/alembic/ddl/impl.py new file mode 100644 index 0000000..17209c9 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/ddl/impl.py @@ -0,0 +1,469 @@ +from sqlalchemy import schema +from sqlalchemy import text +from sqlalchemy import types as sqltypes + +from . import base +from .. 
import util +from ..util import sqla_compat +from ..util.compat import string_types +from ..util.compat import text_type +from ..util.compat import with_metaclass + + +class ImplMeta(type): + def __init__(cls, classname, bases, dict_): + newtype = type.__init__(cls, classname, bases, dict_) + if "__dialect__" in dict_: + _impls[dict_["__dialect__"]] = cls + return newtype + + +_impls = {} + + +class DefaultImpl(with_metaclass(ImplMeta)): + + """Provide the entrypoint for major migration operations, + including database-specific behavioral variances. + + While individual SQL/DDL constructs already provide + for database-specific implementations, variances here + allow for entirely different sequences of operations + to take place for a particular migration, such as + SQL Server's special 'IDENTITY INSERT' step for + bulk inserts. + + """ + + __dialect__ = "default" + + transactional_ddl = False + command_terminator = ";" + + def __init__( + self, + dialect, + connection, + as_sql, + transactional_ddl, + output_buffer, + context_opts, + ): + self.dialect = dialect + self.connection = connection + self.as_sql = as_sql + self.literal_binds = context_opts.get("literal_binds", False) + + self.output_buffer = output_buffer + self.memo = {} + self.context_opts = context_opts + if transactional_ddl is not None: + self.transactional_ddl = transactional_ddl + + if self.literal_binds: + if not self.as_sql: + raise util.CommandError( + "Can't use literal_binds setting without as_sql mode" + ) + + @classmethod + def get_by_dialect(cls, dialect): + return _impls[dialect.name] + + def static_output(self, text): + self.output_buffer.write(text_type(text + "\n\n")) + self.output_buffer.flush() + + def requires_recreate_in_batch(self, batch_op): + """Return True if the given :class:`.BatchOperationsImpl` + would need the table to be recreated and copied in order to + proceed. + + Normally, only returns True on SQLite when operations other + than add_column are present. + + """ + return False + + def prep_table_for_batch(self, table): + """perform any operations needed on a table before a new + one is created to replace it in batch mode. + + the PG dialect uses this to drop constraints on the table + before the new one uses those same names. 
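A minimal sketch of the kind of batch operation these hooks serve, assuming a hypothetical ``account`` table inside a migration's ``upgrade()``::

    from alembic import op
    import sqlalchemy as sa

    # on SQLite this normally takes the recreate-and-copy path, per
    # requires_recreate_in_batch() above, since it is more than add_column
    with op.batch_alter_table("account") as batch_op:
        batch_op.alter_column(
            "name",
            type_=sa.String(length=100),
            existing_type=sa.String(length=50),
        )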
+ + """ + + @property + def bind(self): + return self.connection + + def _exec( + self, + construct, + execution_options=None, + multiparams=(), + params=util.immutabledict(), + ): + if isinstance(construct, string_types): + construct = text(construct) + if self.as_sql: + if multiparams or params: + # TODO: coverage + raise Exception("Execution arguments not allowed with as_sql") + + if self.literal_binds and not isinstance( + construct, schema.DDLElement + ): + compile_kw = dict(compile_kwargs={"literal_binds": True}) + else: + compile_kw = {} + + self.static_output( + text_type( + construct.compile(dialect=self.dialect, **compile_kw) + ) + .replace("\t", " ") + .strip() + + self.command_terminator + ) + else: + conn = self.connection + if execution_options: + conn = conn.execution_options(**execution_options) + return conn.execute(construct, *multiparams, **params) + + def execute(self, sql, execution_options=None): + self._exec(sql, execution_options) + + def alter_column( + self, + table_name, + column_name, + nullable=None, + server_default=False, + name=None, + type_=None, + schema=None, + autoincrement=None, + comment=False, + existing_comment=None, + existing_type=None, + existing_server_default=None, + existing_nullable=None, + existing_autoincrement=None, + ): + if autoincrement is not None or existing_autoincrement is not None: + util.warn( + "autoincrement and existing_autoincrement " + "only make sense for MySQL", + stacklevel=3, + ) + if nullable is not None: + self._exec( + base.ColumnNullable( + table_name, + column_name, + nullable, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_comment=existing_comment, + ) + ) + if server_default is not False: + self._exec( + base.ColumnDefault( + table_name, + column_name, + server_default, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_comment=existing_comment, + ) + ) + if type_ is not None: + self._exec( + base.ColumnType( + table_name, + column_name, + type_, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_comment=existing_comment, + ) + ) + + if comment is not False: + self._exec( + base.ColumnComment( + table_name, + column_name, + comment, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_comment=existing_comment, + ) + ) + + # do the new name last ;) + if name is not None: + self._exec( + base.ColumnName( + table_name, + column_name, + name, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + ) + ) + + def add_column(self, table_name, column, schema=None): + self._exec(base.AddColumn(table_name, column, schema=schema)) + + def drop_column(self, table_name, column, schema=None, **kw): + self._exec(base.DropColumn(table_name, column, schema=schema)) + + def add_constraint(self, const): + if const._create_rule is None or const._create_rule(self): + self._exec(schema.AddConstraint(const)) + + def drop_constraint(self, const): + self._exec(schema.DropConstraint(const)) + + def rename_table(self, old_table_name, new_table_name, schema=None): + self._exec( + base.RenameTable(old_table_name, new_table_name, schema=schema) + ) + + def create_table(self, 
table): + table.dispatch.before_create( + table, self.connection, checkfirst=False, _ddl_runner=self + ) + self._exec(schema.CreateTable(table)) + table.dispatch.after_create( + table, self.connection, checkfirst=False, _ddl_runner=self + ) + for index in table.indexes: + self._exec(schema.CreateIndex(index)) + + with_comment = ( + sqla_compat._dialect_supports_comments(self.dialect) + and not self.dialect.inline_comments + ) + comment = sqla_compat._comment_attribute(table) + if comment and with_comment: + self.create_table_comment(table) + + for column in table.columns: + comment = sqla_compat._comment_attribute(column) + if comment and with_comment: + self.create_column_comment(column) + + def drop_table(self, table): + self._exec(schema.DropTable(table)) + + def create_index(self, index): + self._exec(schema.CreateIndex(index)) + + def create_table_comment(self, table): + self._exec(schema.SetTableComment(table)) + + def drop_table_comment(self, table): + self._exec(schema.DropTableComment(table)) + + def create_column_comment(self, column): + self._exec(schema.SetColumnComment(column)) + + def drop_index(self, index): + self._exec(schema.DropIndex(index)) + + def bulk_insert(self, table, rows, multiinsert=True): + if not isinstance(rows, list): + raise TypeError("List expected") + elif rows and not isinstance(rows[0], dict): + raise TypeError("List of dictionaries expected") + if self.as_sql: + for row in rows: + self._exec( + table.insert(inline=True).values( + **dict( + ( + k, + sqla_compat._literal_bindparam( + k, v, type_=table.c[k].type + ) + if not isinstance( + v, sqla_compat._literal_bindparam + ) + else v, + ) + for k, v in row.items() + ) + ) + ) + else: + # work around http://www.sqlalchemy.org/trac/ticket/2461 + if not hasattr(table, "_autoincrement_column"): + table._autoincrement_column = None + if rows: + if multiinsert: + self._exec(table.insert(inline=True), multiparams=rows) + else: + for row in rows: + self._exec(table.insert(inline=True).values(**row)) + + def compare_type(self, inspector_column, metadata_column): + + conn_type = inspector_column.type + metadata_type = metadata_column.type + + metadata_impl = metadata_type.dialect_impl(self.dialect) + if isinstance(metadata_impl, sqltypes.Variant): + metadata_impl = metadata_impl.impl.dialect_impl(self.dialect) + + # work around SQLAlchemy bug "stale value for type affinity" + # fixed in 0.7.4 + metadata_impl.__dict__.pop("_type_affinity", None) + + if hasattr(metadata_impl, "compare_against_backend"): + comparison = metadata_impl.compare_against_backend( + self.dialect, conn_type + ) + if comparison is not None: + return not comparison + + if conn_type._compare_type_affinity(metadata_impl): + comparator = _type_comparators.get(conn_type._type_affinity, None) + + return comparator and comparator(metadata_impl, conn_type) + else: + return True + + def compare_server_default( + self, + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_inspector_default, + ): + return rendered_inspector_default != rendered_metadata_default + + def correct_for_autogen_constraints( + self, + conn_uniques, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + ): + pass + + def render_ddl_sql_expr(self, expr, is_server_default=False, **kw): + """Render a SQL expression that is typically a server default, + index expression, etc. + + .. 
versionadded:: 1.0.11 + + """ + + compile_kw = dict( + compile_kwargs={"literal_binds": True, "include_table": False} + ) + return text_type(expr.compile(dialect=self.dialect, **compile_kw)) + + def _compat_autogen_column_reflect(self, inspector): + return self.autogen_column_reflect + + def correct_for_autogen_foreignkeys(self, conn_fks, metadata_fks): + pass + + def autogen_column_reflect(self, inspector, table, column_info): + """A hook that is attached to the 'column_reflect' event for when + a Table is reflected from the database during the autogenerate + process. + + Dialects can elect to modify the information gathered here. + + """ + + def start_migrations(self): + """A hook called when :meth:`.EnvironmentContext.run_migrations` + is called. + + Implementations can set up per-migration-run state here. + + """ + + def emit_begin(self): + """Emit the string ``BEGIN``, or the backend-specific + equivalent, on the current connection context. + + This is used in offline mode and typically + via :meth:`.EnvironmentContext.begin_transaction`. + + """ + self.static_output("BEGIN" + self.command_terminator) + + def emit_commit(self): + """Emit the string ``COMMIT``, or the backend-specific + equivalent, on the current connection context. + + This is used in offline mode and typically + via :meth:`.EnvironmentContext.begin_transaction`. + + """ + self.static_output("COMMIT" + self.command_terminator) + + def render_type(self, type_obj, autogen_context): + return False + + +def _string_compare(t1, t2): + return t1.length is not None and t1.length != t2.length + + +def _numeric_compare(t1, t2): + return (t1.precision is not None and t1.precision != t2.precision) or ( + t1.precision is not None + and t1.scale is not None + and t1.scale != t2.scale + ) + + +def _integer_compare(t1, t2): + t1_small_or_big = ( + "S" + if isinstance(t1, sqltypes.SmallInteger) + else "B" + if isinstance(t1, sqltypes.BigInteger) + else "I" + ) + t2_small_or_big = ( + "S" + if isinstance(t2, sqltypes.SmallInteger) + else "B" + if isinstance(t2, sqltypes.BigInteger) + else "I" + ) + return t1_small_or_big != t2_small_or_big + + +def _datetime_compare(t1, t2): + return t1.timezone != t2.timezone + + +_type_comparators = { + sqltypes.String: _string_compare, + sqltypes.Numeric: _numeric_compare, + sqltypes.Integer: _integer_compare, + sqltypes.DateTime: _datetime_compare, +} diff --git a/py3/lib/python3.6/site-packages/alembic/ddl/mssql.py b/py3/lib/python3.6/site-packages/alembic/ddl/mssql.py new file mode 100644 index 0000000..de2168b --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/ddl/mssql.py @@ -0,0 +1,257 @@ +from sqlalchemy import types as sqltypes +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.schema import Column +from sqlalchemy.schema import CreateIndex +from sqlalchemy.sql.expression import ClauseElement +from sqlalchemy.sql.expression import Executable + +from .base import AddColumn +from .base import alter_column +from .base import alter_table +from .base import ColumnDefault +from .base import ColumnName +from .base import ColumnNullable +from .base import ColumnType +from .base import format_column_name +from .base import format_server_default +from .base import format_table_name +from .base import format_type +from .base import RenameTable +from .impl import DefaultImpl +from .. 
import util + + +class MSSQLImpl(DefaultImpl): + __dialect__ = "mssql" + transactional_ddl = True + batch_separator = "GO" + + def __init__(self, *arg, **kw): + super(MSSQLImpl, self).__init__(*arg, **kw) + self.batch_separator = self.context_opts.get( + "mssql_batch_separator", self.batch_separator + ) + + def _exec(self, construct, *args, **kw): + result = super(MSSQLImpl, self)._exec(construct, *args, **kw) + if self.as_sql and self.batch_separator: + self.static_output(self.batch_separator) + return result + + def emit_begin(self): + self.static_output("BEGIN TRANSACTION" + self.command_terminator) + + def emit_commit(self): + super(MSSQLImpl, self).emit_commit() + if self.as_sql and self.batch_separator: + self.static_output(self.batch_separator) + + def alter_column( + self, + table_name, + column_name, + nullable=None, + server_default=False, + name=None, + type_=None, + schema=None, + existing_type=None, + existing_server_default=None, + existing_nullable=None, + **kw + ): + + if nullable is not None and existing_type is None: + if type_ is not None: + existing_type = type_ + # the NULL/NOT NULL alter will handle + # the type alteration + type_ = None + else: + raise util.CommandError( + "MS-SQL ALTER COLUMN operations " + "with NULL or NOT NULL require the " + "existing_type or a new type_ be passed." + ) + + super(MSSQLImpl, self).alter_column( + table_name, + column_name, + nullable=nullable, + type_=type_, + schema=schema, + existing_type=existing_type, + existing_nullable=existing_nullable, + **kw + ) + + if server_default is not False: + if existing_server_default is not False or server_default is None: + self._exec( + _ExecDropConstraint( + table_name, column_name, "sys.default_constraints" + ) + ) + if server_default is not None: + super(MSSQLImpl, self).alter_column( + table_name, + column_name, + schema=schema, + server_default=server_default, + ) + + if name is not None: + super(MSSQLImpl, self).alter_column( + table_name, column_name, schema=schema, name=name + ) + + def create_index(self, index): + # this likely defaults to None if not present, so get() + # should normally not return the default value. 
being + # defensive in any case + mssql_include = index.kwargs.get("mssql_include", None) or () + for col in mssql_include: + if col not in index.table.c: + index.table.append_column(Column(col, sqltypes.NullType)) + self._exec(CreateIndex(index)) + + def bulk_insert(self, table, rows, **kw): + if self.as_sql: + self._exec( + "SET IDENTITY_INSERT %s ON" + % self.dialect.identifier_preparer.format_table(table) + ) + super(MSSQLImpl, self).bulk_insert(table, rows, **kw) + self._exec( + "SET IDENTITY_INSERT %s OFF" + % self.dialect.identifier_preparer.format_table(table) + ) + else: + super(MSSQLImpl, self).bulk_insert(table, rows, **kw) + + def drop_column(self, table_name, column, **kw): + drop_default = kw.pop("mssql_drop_default", False) + if drop_default: + self._exec( + _ExecDropConstraint( + table_name, column, "sys.default_constraints" + ) + ) + drop_check = kw.pop("mssql_drop_check", False) + if drop_check: + self._exec( + _ExecDropConstraint( + table_name, column, "sys.check_constraints" + ) + ) + drop_fks = kw.pop("mssql_drop_foreign_key", False) + if drop_fks: + self._exec(_ExecDropFKConstraint(table_name, column)) + super(MSSQLImpl, self).drop_column(table_name, column, **kw) + + +class _ExecDropConstraint(Executable, ClauseElement): + def __init__(self, tname, colname, type_): + self.tname = tname + self.colname = colname + self.type_ = type_ + + +class _ExecDropFKConstraint(Executable, ClauseElement): + def __init__(self, tname, colname): + self.tname = tname + self.colname = colname + + +@compiles(_ExecDropConstraint, "mssql") +def _exec_drop_col_constraint(element, compiler, **kw): + tname, colname, type_ = element.tname, element.colname, element.type_ + # from http://www.mssqltips.com/sqlservertip/1425/\ + # working-with-default-constraints-in-sql-server/ + # TODO: needs table formatting, etc. 
+ return """declare @const_name varchar(256) +select @const_name = [name] from %(type)s +where parent_object_id = object_id('%(tname)s') +and col_name(parent_object_id, parent_column_id) = '%(colname)s' +exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % { + "type": type_, + "tname": tname, + "colname": colname, + "tname_quoted": format_table_name(compiler, tname, None), + } + + +@compiles(_ExecDropFKConstraint, "mssql") +def _exec_drop_col_fk_constraint(element, compiler, **kw): + tname, colname = element.tname, element.colname + + return """declare @const_name varchar(256) +select @const_name = [name] from + sys.foreign_keys fk join sys.foreign_key_columns fkc + on fk.object_id=fkc.constraint_object_id +where fkc.parent_object_id = object_id('%(tname)s') +and col_name(fkc.parent_object_id, fkc.parent_column_id) = '%(colname)s' +exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % { + "tname": tname, + "colname": colname, + "tname_quoted": format_table_name(compiler, tname, None), + } + + +@compiles(AddColumn, "mssql") +def visit_add_column(element, compiler, **kw): + return "%s %s" % ( + alter_table(compiler, element.table_name, element.schema), + mssql_add_column(compiler, element.column, **kw), + ) + + +def mssql_add_column(compiler, column, **kw): + return "ADD %s" % compiler.get_column_specification(column, **kw) + + +@compiles(ColumnNullable, "mssql") +def visit_column_nullable(element, compiler, **kw): + return "%s %s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + format_type(compiler, element.existing_type), + "NULL" if element.nullable else "NOT NULL", + ) + + +@compiles(ColumnDefault, "mssql") +def visit_column_default(element, compiler, **kw): + # TODO: there can also be a named constraint + # with ADD CONSTRAINT here + return "%s ADD DEFAULT %s FOR %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_server_default(compiler, element.default), + format_column_name(compiler, element.column_name), + ) + + +@compiles(ColumnName, "mssql") +def visit_rename_column(element, compiler, **kw): + return "EXEC sp_rename '%s.%s', %s, 'COLUMN'" % ( + format_table_name(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname), + ) + + +@compiles(ColumnType, "mssql") +def visit_column_type(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + format_type(compiler, element.type_), + ) + + +@compiles(RenameTable, "mssql") +def visit_rename_table(element, compiler, **kw): + return "EXEC sp_rename '%s', %s" % ( + format_table_name(compiler, element.table_name, element.schema), + format_table_name(compiler, element.new_table_name, None), + ) diff --git a/py3/lib/python3.6/site-packages/alembic/ddl/mysql.py b/py3/lib/python3.6/site-packages/alembic/ddl/mysql.py new file mode 100644 index 0000000..b514149 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/ddl/mysql.py @@ -0,0 +1,454 @@ +import re + +from sqlalchemy import schema +from sqlalchemy import types as sqltypes +from sqlalchemy.ext.compiler import compiles + +from .base import alter_table +from .base import AlterColumn +from .base import ColumnDefault +from .base import ColumnName +from .base import ColumnNullable +from .base import ColumnType +from .base import format_column_name +from .base import 
format_server_default +from .impl import DefaultImpl +from .. import util +from ..autogenerate import compare +from ..util.compat import string_types +from ..util.sqla_compat import _is_mariadb +from ..util.sqla_compat import _is_type_bound +from ..util.sqla_compat import sqla_100 + + +class MySQLImpl(DefaultImpl): + __dialect__ = "mysql" + + transactional_ddl = False + + def alter_column( + self, + table_name, + column_name, + nullable=None, + server_default=False, + name=None, + type_=None, + schema=None, + existing_type=None, + existing_server_default=None, + existing_nullable=None, + autoincrement=None, + existing_autoincrement=None, + comment=False, + existing_comment=None, + **kw + ): + if name is not None or self._is_mysql_allowed_functional_default( + type_ if type_ is not None else existing_type, server_default + ): + self._exec( + MySQLChangeColumn( + table_name, + column_name, + schema=schema, + newname=name if name is not None else column_name, + nullable=nullable + if nullable is not None + else existing_nullable + if existing_nullable is not None + else True, + type_=type_ if type_ is not None else existing_type, + default=server_default + if server_default is not False + else existing_server_default, + autoincrement=autoincrement + if autoincrement is not None + else existing_autoincrement, + ) + ) + elif ( + nullable is not None + or type_ is not None + or autoincrement is not None + or comment is not False + ): + self._exec( + MySQLModifyColumn( + table_name, + column_name, + schema=schema, + newname=name if name is not None else column_name, + nullable=nullable + if nullable is not None + else existing_nullable + if existing_nullable is not None + else True, + type_=type_ if type_ is not None else existing_type, + default=server_default + if server_default is not False + else existing_server_default, + autoincrement=autoincrement + if autoincrement is not None + else existing_autoincrement, + comment=comment + if comment is not False + else existing_comment, + ) + ) + elif server_default is not False: + self._exec( + MySQLAlterDefault( + table_name, column_name, server_default, schema=schema + ) + ) + + def drop_constraint(self, const): + if isinstance(const, schema.CheckConstraint) and _is_type_bound(const): + return + + super(MySQLImpl, self).drop_constraint(const) + + def _is_mysql_allowed_functional_default(self, type_, server_default): + return ( + type_ is not None + and type_._type_affinity is sqltypes.DateTime + and server_default is not None + ) + + def compare_server_default( + self, + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_inspector_default, + ): + # partially a workaround for SQLAlchemy issue #3023; if the + # column were created without "NOT NULL", MySQL may have added + # an implicit default of '0' which we need to skip + # TODO: this is not really covered anymore ? + if ( + metadata_column.type._type_affinity is sqltypes.Integer + and inspector_column.primary_key + and not inspector_column.autoincrement + and not rendered_metadata_default + and rendered_inspector_default == "'0'" + ): + return False + elif inspector_column.type._type_affinity is sqltypes.Integer: + rendered_inspector_default = ( + re.sub(r"^'|'$", "", rendered_inspector_default) + if rendered_inspector_default is not None + else None + ) + return rendered_inspector_default != rendered_metadata_default + elif rendered_inspector_default and rendered_metadata_default: + # adjust for "function()" vs. 
"FUNCTION" as can occur particularly + # for the CURRENT_TIMESTAMP function on newer MariaDB versions + + # SQLAlchemy MySQL dialect bundles ON UPDATE into the server + # default; adjust for this possibly being present. + onupdate_ins = re.match( + r"(.*) (on update.*?)(?:\(\))?$", + rendered_inspector_default.lower(), + ) + onupdate_met = re.match( + r"(.*) (on update.*?)(?:\(\))?$", + rendered_metadata_default.lower(), + ) + + if onupdate_ins: + if not onupdate_met: + return True + elif onupdate_ins.group(2) != onupdate_met.group(2): + return True + + rendered_inspector_default = onupdate_ins.group(1) + rendered_metadata_default = onupdate_met.group(1) + + return re.sub( + r"(.*?)(?:\(\))?$", r"\1", rendered_inspector_default.lower() + ) != re.sub( + r"(.*?)(?:\(\))?$", r"\1", rendered_metadata_default.lower() + ) + else: + return rendered_inspector_default != rendered_metadata_default + + def correct_for_autogen_constraints( + self, + conn_unique_constraints, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + ): + + # TODO: if SQLA 1.0, make use of "duplicates_index" + # metadata + removed = set() + for idx in list(conn_indexes): + if idx.unique: + continue + # MySQL puts implicit indexes on FK columns, even if + # composite and even if MyISAM, so can't check this too easily. + # the name of the index may be the column name or it may + # be the name of the FK constraint. + for col in idx.columns: + if idx.name == col.name: + conn_indexes.remove(idx) + removed.add(idx.name) + break + for fk in col.foreign_keys: + if fk.name == idx.name: + conn_indexes.remove(idx) + removed.add(idx.name) + break + if idx.name in removed: + break + + # then remove indexes from the "metadata_indexes" + # that we've removed from reflected, otherwise they come out + # as adds (see #202) + for idx in list(metadata_indexes): + if idx.name in removed: + metadata_indexes.remove(idx) + + if not sqla_100: + self._legacy_correct_for_dupe_uq_uix( + conn_unique_constraints, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + ) + + def _legacy_correct_for_dupe_uq_uix( + self, + conn_unique_constraints, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + ): + + # then dedupe unique indexes vs. constraints, since MySQL + # doesn't really have unique constraints as a separate construct. + # but look in the metadata and try to maintain constructs + # that already seem to be defined one way or the other + # on that side. 
See #276 + metadata_uq_names = set( + [ + cons.name + for cons in metadata_unique_constraints + if cons.name is not None + ] + ) + + unnamed_metadata_uqs = set( + [ + compare._uq_constraint_sig(cons).sig + for cons in metadata_unique_constraints + if cons.name is None + ] + ) + + metadata_ix_names = set( + [cons.name for cons in metadata_indexes if cons.unique] + ) + conn_uq_names = dict( + (cons.name, cons) for cons in conn_unique_constraints + ) + conn_ix_names = dict( + (cons.name, cons) for cons in conn_indexes if cons.unique + ) + + for overlap in set(conn_uq_names).intersection(conn_ix_names): + if overlap not in metadata_uq_names: + if ( + compare._uq_constraint_sig(conn_uq_names[overlap]).sig + not in unnamed_metadata_uqs + ): + + conn_unique_constraints.discard(conn_uq_names[overlap]) + elif overlap not in metadata_ix_names: + conn_indexes.discard(conn_ix_names[overlap]) + + def correct_for_autogen_foreignkeys(self, conn_fks, metadata_fks): + conn_fk_by_sig = dict( + (compare._fk_constraint_sig(fk).sig, fk) for fk in conn_fks + ) + metadata_fk_by_sig = dict( + (compare._fk_constraint_sig(fk).sig, fk) for fk in metadata_fks + ) + + for sig in set(conn_fk_by_sig).intersection(metadata_fk_by_sig): + mdfk = metadata_fk_by_sig[sig] + cnfk = conn_fk_by_sig[sig] + # MySQL considers RESTRICT to be the default and doesn't + # report on it. if the model has explicit RESTRICT and + # the conn FK has None, set it to RESTRICT + if ( + mdfk.ondelete is not None + and mdfk.ondelete.lower() == "restrict" + and cnfk.ondelete is None + ): + cnfk.ondelete = "RESTRICT" + if ( + mdfk.onupdate is not None + and mdfk.onupdate.lower() == "restrict" + and cnfk.onupdate is None + ): + cnfk.onupdate = "RESTRICT" + + +class MySQLAlterDefault(AlterColumn): + def __init__(self, name, column_name, default, schema=None): + super(AlterColumn, self).__init__(name, schema=schema) + self.column_name = column_name + self.default = default + + +class MySQLChangeColumn(AlterColumn): + def __init__( + self, + name, + column_name, + schema=None, + newname=None, + type_=None, + nullable=None, + default=False, + autoincrement=None, + comment=False, + ): + super(AlterColumn, self).__init__(name, schema=schema) + self.column_name = column_name + self.nullable = nullable + self.newname = newname + self.default = default + self.autoincrement = autoincrement + self.comment = comment + if type_ is None: + raise util.CommandError( + "All MySQL CHANGE/MODIFY COLUMN operations " + "require the existing type." 
+ ) + + self.type_ = sqltypes.to_instance(type_) + + +class MySQLModifyColumn(MySQLChangeColumn): + pass + + +@compiles(ColumnNullable, "mysql") +@compiles(ColumnName, "mysql") +@compiles(ColumnDefault, "mysql") +@compiles(ColumnType, "mysql") +def _mysql_doesnt_support_individual(element, compiler, **kw): + raise NotImplementedError( + "Individual alter column constructs not supported by MySQL" + ) + + +@compiles(MySQLAlterDefault, "mysql") +def _mysql_alter_default(element, compiler, **kw): + return "%s ALTER COLUMN %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + "SET DEFAULT %s" % format_server_default(compiler, element.default) + if element.default is not None + else "DROP DEFAULT", + ) + + +@compiles(MySQLModifyColumn, "mysql") +def _mysql_modify_column(element, compiler, **kw): + return "%s MODIFY %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + _mysql_colspec( + compiler, + nullable=element.nullable, + server_default=element.default, + type_=element.type_, + autoincrement=element.autoincrement, + comment=element.comment, + ), + ) + + +@compiles(MySQLChangeColumn, "mysql") +def _mysql_change_column(element, compiler, **kw): + return "%s CHANGE %s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname), + _mysql_colspec( + compiler, + nullable=element.nullable, + server_default=element.default, + type_=element.type_, + autoincrement=element.autoincrement, + comment=element.comment, + ), + ) + + +def _render_value(compiler, expr): + if isinstance(expr, string_types): + return "'%s'" % expr + else: + return compiler.sql_compiler.process(expr) + + +def _mysql_colspec( + compiler, nullable, server_default, type_, autoincrement, comment +): + spec = "%s %s" % ( + compiler.dialect.type_compiler.process(type_), + "NULL" if nullable else "NOT NULL", + ) + if autoincrement: + spec += " AUTO_INCREMENT" + if server_default is not False and server_default is not None: + spec += " DEFAULT %s" % _render_value(compiler, server_default) + if comment: + spec += " COMMENT %s" % compiler.sql_compiler.render_literal_value( + comment, sqltypes.String() + ) + + return spec + + +@compiles(schema.DropConstraint, "mysql") +def _mysql_drop_constraint(element, compiler, **kw): + """Redefine SQLAlchemy's drop constraint to + raise errors for invalid constraint type.""" + + constraint = element.element + if isinstance( + constraint, + ( + schema.ForeignKeyConstraint, + schema.PrimaryKeyConstraint, + schema.UniqueConstraint, + ), + ): + return compiler.visit_drop_constraint(element, **kw) + elif isinstance(constraint, schema.CheckConstraint): + # note that SQLAlchemy as of 1.2 does not yet support + # DROP CONSTRAINT for MySQL/MariaDB, so we implement fully + # here. 
+ if _is_mariadb(compiler.dialect): + return "ALTER TABLE %s DROP CONSTRAINT %s" % ( + compiler.preparer.format_table(constraint.table), + compiler.preparer.format_constraint(constraint), + ) + else: + return "ALTER TABLE %s DROP CHECK %s" % ( + compiler.preparer.format_table(constraint.table), + compiler.preparer.format_constraint(constraint), + ) + else: + raise NotImplementedError( + "No generic 'DROP CONSTRAINT' in MySQL - " + "please specify constraint type" + ) diff --git a/py3/lib/python3.6/site-packages/alembic/ddl/oracle.py b/py3/lib/python3.6/site-packages/alembic/ddl/oracle.py new file mode 100644 index 0000000..76cf7c5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/ddl/oracle.py @@ -0,0 +1,109 @@ +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.sql import sqltypes + +from .base import AddColumn +from .base import alter_table +from .base import ColumnComment +from .base import ColumnDefault +from .base import ColumnName +from .base import ColumnNullable +from .base import ColumnType +from .base import format_column_name +from .base import format_server_default +from .base import format_type +from .impl import DefaultImpl + + +class OracleImpl(DefaultImpl): + __dialect__ = "oracle" + transactional_ddl = False + batch_separator = "/" + command_terminator = "" + + def __init__(self, *arg, **kw): + super(OracleImpl, self).__init__(*arg, **kw) + self.batch_separator = self.context_opts.get( + "oracle_batch_separator", self.batch_separator + ) + + def _exec(self, construct, *args, **kw): + result = super(OracleImpl, self)._exec(construct, *args, **kw) + if self.as_sql and self.batch_separator: + self.static_output(self.batch_separator) + return result + + def emit_begin(self): + self._exec("SET TRANSACTION READ WRITE") + + def emit_commit(self): + self._exec("COMMIT") + + +@compiles(AddColumn, "oracle") +def visit_add_column(element, compiler, **kw): + return "%s %s" % ( + alter_table(compiler, element.table_name, element.schema), + add_column(compiler, element.column, **kw), + ) + + +@compiles(ColumnNullable, "oracle") +def visit_column_nullable(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "NULL" if element.nullable else "NOT NULL", + ) + + +@compiles(ColumnType, "oracle") +def visit_column_type(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "%s" % format_type(compiler, element.type_), + ) + + +@compiles(ColumnName, "oracle") +def visit_column_name(element, compiler, **kw): + return "%s RENAME COLUMN %s TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname), + ) + + +@compiles(ColumnDefault, "oracle") +def visit_column_default(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "DEFAULT %s" % format_server_default(compiler, element.default) + if element.default is not None + else "DEFAULT NULL", + ) + + +@compiles(ColumnComment, "oracle") +def visit_column_comment(element, compiler, **kw): + ddl = "COMMENT ON COLUMN {table_name}.{column_name} IS {comment}" + + comment = compiler.sql_compiler.render_literal_value( + (element.comment if element.comment is not None else ""), + sqltypes.String(), + ) + + return 
ddl.format( + table_name=element.table_name, + column_name=element.column_name, + comment=comment, + ) + + +def alter_column(compiler, name): + return "MODIFY %s" % format_column_name(compiler, name) + + +def add_column(compiler, column, **kw): + return "ADD %s" % compiler.get_column_specification(column, **kw) diff --git a/py3/lib/python3.6/site-packages/alembic/ddl/postgresql.py b/py3/lib/python3.6/site-packages/alembic/ddl/postgresql.py new file mode 100644 index 0000000..255c7e6 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/ddl/postgresql.py @@ -0,0 +1,537 @@ +import logging +import re + +from sqlalchemy import Column +from sqlalchemy import Numeric +from sqlalchemy import text +from sqlalchemy import types as sqltypes +from sqlalchemy.dialects.postgresql import BIGINT +from sqlalchemy.dialects.postgresql import INTEGER +from sqlalchemy.sql.expression import ColumnClause +from sqlalchemy.sql.expression import UnaryExpression +from sqlalchemy.types import NULLTYPE + +from .base import alter_column +from .base import alter_table +from .base import AlterColumn +from .base import ColumnComment +from .base import compiles +from .base import format_table_name +from .base import format_type +from .base import RenameTable +from .impl import DefaultImpl +from .. import util +from ..autogenerate import render +from ..operations import ops +from ..operations import schemaobj +from ..operations.base import BatchOperations +from ..operations.base import Operations +from ..util import compat +from ..util import sqla_compat + +if util.sqla_100: + from sqlalchemy.dialects.postgresql import ExcludeConstraint + + +log = logging.getLogger(__name__) + + +class PostgresqlImpl(DefaultImpl): + __dialect__ = "postgresql" + transactional_ddl = True + + def prep_table_for_batch(self, table): + for constraint in table.constraints: + if constraint.name is not None: + self.drop_constraint(constraint) + + def compare_server_default( + self, + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_inspector_default, + ): + # don't do defaults for SERIAL columns + if ( + metadata_column.primary_key + and metadata_column is metadata_column.table._autoincrement_column + ): + return False + + conn_col_default = rendered_inspector_default + + defaults_equal = conn_col_default == rendered_metadata_default + if defaults_equal: + return False + + if None in (conn_col_default, rendered_metadata_default): + return not defaults_equal + + if compat.py2k: + # look for a python 2 "u''" string and filter + m = re.match(r"^u'(.*)'$", rendered_metadata_default) + if m: + rendered_metadata_default = "'%s'" % m.group(1) + + # check for unquoted string and quote for PG String types + if ( + not isinstance(inspector_column.type, Numeric) and + metadata_column.server_default is not None + and isinstance( + metadata_column.server_default.arg, compat.string_types + ) + and not re.match(r"^'.*'$", rendered_metadata_default) + ): + rendered_metadata_default = "'%s'" % rendered_metadata_default + + return not self.connection.scalar( + "SELECT %s = %s" % (conn_col_default, rendered_metadata_default) + ) + + def alter_column( + self, + table_name, + column_name, + nullable=None, + server_default=False, + name=None, + type_=None, + schema=None, + autoincrement=None, + existing_type=None, + existing_server_default=None, + existing_nullable=None, + existing_autoincrement=None, + **kw + ): + + using = kw.pop("postgresql_using", None) + + if using is not None and type_ is None: + raise util.CommandError( + 
"postgresql_using must be used with the type_ parameter" + ) + + if type_ is not None: + self._exec( + PostgresqlColumnType( + table_name, + column_name, + type_, + schema=schema, + using=using, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + ) + ) + + super(PostgresqlImpl, self).alter_column( + table_name, + column_name, + nullable=nullable, + server_default=server_default, + name=name, + schema=schema, + autoincrement=autoincrement, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_autoincrement=existing_autoincrement, + **kw + ) + + def autogen_column_reflect(self, inspector, table, column_info): + if column_info.get("default") and isinstance( + column_info["type"], (INTEGER, BIGINT) + ): + seq_match = re.match( + r"nextval\('(.+?)'::regclass\)", column_info["default"] + ) + if seq_match: + info = inspector.bind.execute( + text( + "select c.relname, a.attname " + "from pg_class as c join " + "pg_depend d on d.objid=c.oid and " + "d.classid='pg_class'::regclass and " + "d.refclassid='pg_class'::regclass " + "join pg_class t on t.oid=d.refobjid " + "join pg_attribute a on a.attrelid=t.oid and " + "a.attnum=d.refobjsubid " + "where c.relkind='S' and c.relname=:seqname" + ), + seqname=seq_match.group(1), + ).first() + if info: + seqname, colname = info + if colname == column_info["name"]: + log.info( + "Detected sequence named '%s' as " + "owned by integer column '%s(%s)', " + "assuming SERIAL and omitting", + seqname, + table.name, + colname, + ) + # sequence, and the owner is this column, + # its a SERIAL - whack it! + del column_info["default"] + + def correct_for_autogen_constraints( + self, + conn_unique_constraints, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + ): + + conn_uniques_by_name = dict( + (c.name, c) for c in conn_unique_constraints + ) + conn_indexes_by_name = dict((c.name, c) for c in conn_indexes) + + if not util.sqla_100: + doubled_constraints = set( + conn_indexes_by_name[name] + for name in set(conn_uniques_by_name).intersection( + conn_indexes_by_name + ) + ) + else: + doubled_constraints = set( + index + for index in conn_indexes + if index.info.get("duplicates_constraint") + ) + + for ix in doubled_constraints: + conn_indexes.remove(ix) + + for idx in list(metadata_indexes): + if idx.name in conn_indexes_by_name: + continue + exprs = idx.expressions + for expr in exprs: + while isinstance(expr, UnaryExpression): + expr = expr.element + if not isinstance(expr, Column): + util.warn( + "autogenerate skipping functional index %s; " + "not supported by SQLAlchemy reflection" % idx.name + ) + metadata_indexes.discard(idx) + + def render_type(self, type_, autogen_context): + mod = type(type_).__module__ + if not mod.startswith("sqlalchemy.dialects.postgresql"): + return False + + if hasattr(self, "_render_%s_type" % type_.__visit_name__): + meth = getattr(self, "_render_%s_type" % type_.__visit_name__) + return meth(type_, autogen_context) + + return False + + def _render_HSTORE_type(self, type_, autogen_context): + return render._render_type_w_subtype( + type_, autogen_context, "text_type", r"(.+?\(.*text_type=)" + ) + + def _render_ARRAY_type(self, type_, autogen_context): + return render._render_type_w_subtype( + type_, autogen_context, "item_type", r"(.+?\()" + ) + + def _render_JSON_type(self, type_, autogen_context): + return render._render_type_w_subtype( + type_, autogen_context, 
"astext_type", r"(.+?\(.*astext_type=)" + ) + + def _render_JSONB_type(self, type_, autogen_context): + return render._render_type_w_subtype( + type_, autogen_context, "astext_type", r"(.+?\(.*astext_type=)" + ) + + +class PostgresqlColumnType(AlterColumn): + def __init__(self, name, column_name, type_, **kw): + using = kw.pop("using", None) + super(PostgresqlColumnType, self).__init__(name, column_name, **kw) + self.type_ = sqltypes.to_instance(type_) + self.using = using + + +@compiles(RenameTable, "postgresql") +def visit_rename_table(element, compiler, **kw): + return "%s RENAME TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_table_name(compiler, element.new_table_name, None), + ) + + +@compiles(PostgresqlColumnType, "postgresql") +def visit_column_type(element, compiler, **kw): + return "%s %s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "TYPE %s" % format_type(compiler, element.type_), + "USING %s" % element.using if element.using else "", + ) + + +@compiles(ColumnComment, "postgresql") +def visit_column_comment(element, compiler, **kw): + ddl = "COMMENT ON COLUMN {table_name}.{column_name} IS {comment}" + comment = ( + compiler.sql_compiler.render_literal_value( + element.comment, sqltypes.String() + ) + if element.comment is not None + else "NULL" + ) + + return ddl.format( + table_name=element.table_name, + column_name=element.column_name, + comment=comment, + ) + + +@Operations.register_operation("create_exclude_constraint") +@BatchOperations.register_operation( + "create_exclude_constraint", "batch_create_exclude_constraint" +) +@ops.AddConstraintOp.register_add_constraint("exclude_constraint") +class CreateExcludeConstraintOp(ops.AddConstraintOp): + """Represent a create exclude constraint operation.""" + + constraint_type = "exclude" + + def __init__( + self, + constraint_name, + table_name, + elements, + where=None, + schema=None, + _orig_constraint=None, + **kw + ): + self.constraint_name = constraint_name + self.table_name = table_name + self.elements = elements + self.where = where + self.schema = schema + self._orig_constraint = _orig_constraint + self.kw = kw + + @classmethod + def from_constraint(cls, constraint): + constraint_table = sqla_compat._table_for_constraint(constraint) + + return cls( + constraint.name, + constraint_table.name, + [(expr, op) for expr, name, op in constraint._render_exprs], + where=constraint.where, + schema=constraint_table.schema, + _orig_constraint=constraint, + deferrable=constraint.deferrable, + initially=constraint.initially, + using=constraint.using, + ) + + def to_constraint(self, migration_context=None): + if not util.sqla_100: + raise NotImplementedError( + "ExcludeConstraint not supported until SQLAlchemy 1.0" + ) + if self._orig_constraint is not None: + return self._orig_constraint + schema_obj = schemaobj.SchemaObjects(migration_context) + t = schema_obj.table(self.table_name, schema=self.schema) + excl = ExcludeConstraint( + *self.elements, + name=self.constraint_name, + where=self.where, + **self.kw + ) + for expr, name, oper in excl._render_exprs: + t.append_column(Column(name, NULLTYPE)) + t.append_constraint(excl) + return excl + + @classmethod + def create_exclude_constraint( + cls, operations, constraint_name, table_name, *elements, **kw + ): + """Issue an alter to create an EXCLUDE constraint using the + current migration context. + + .. 
note:: This method is Postgresql specific, and additionally + requires at least SQLAlchemy 1.0. + + e.g.:: + + from alembic import op + + op.create_exclude_constraint( + "user_excl", + "user", + + ("period", '&&'), + ("group", '='), + where=("group != 'some group'") + + ) + + Note that the expressions work the same way as that of + the ``ExcludeConstraint`` object itself; if plain strings are + passed, quoting rules must be applied manually. + + :param name: Name of the constraint. + :param table_name: String name of the source table. + :param elements: exclude conditions. + :param where: SQL expression or SQL string with optional WHERE + clause. + :param deferrable: optional bool. If set, emit DEFERRABLE or + NOT DEFERRABLE when issuing DDL for this constraint. + :param initially: optional string. If set, emit INITIALLY + when issuing DDL for this constraint. + :param schema: Optional schema name to operate within. + + .. versionadded:: 0.9.0 + + """ + op = cls(constraint_name, table_name, elements, **kw) + return operations.invoke(op) + + @classmethod + def batch_create_exclude_constraint( + cls, operations, constraint_name, *elements, **kw + ): + """Issue a "create exclude constraint" instruction using the + current batch migration context. + + .. note:: This method is Postgresql specific, and additionally + requires at least SQLAlchemy 1.0. + + .. versionadded:: 0.9.0 + + .. seealso:: + + :meth:`.Operations.create_exclude_constraint` + + """ + kw["schema"] = operations.impl.schema + op = cls(constraint_name, operations.impl.table_name, elements, **kw) + return operations.invoke(op) + + +@render.renderers.dispatch_for(CreateExcludeConstraintOp) +def _add_exclude_constraint(autogen_context, op): + return _exclude_constraint(op.to_constraint(), autogen_context, alter=True) + + +if util.sqla_100: + + @render._constraint_renderers.dispatch_for(ExcludeConstraint) + def _render_inline_exclude_constraint(constraint, autogen_context): + rendered = render._user_defined_render( + "exclude", constraint, autogen_context + ) + if rendered is not False: + return rendered + + return _exclude_constraint(constraint, autogen_context, False) + + +def _postgresql_autogenerate_prefix(autogen_context): + + imports = autogen_context.imports + if imports is not None: + imports.add("from sqlalchemy.dialects import postgresql") + return "postgresql." 
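The exclude-constraint operation rendered above can be exercised from an ordinary migration script. A minimal sketch, assuming a hypothetical ``booking`` table with a range column ``period`` and a boolean ``cancelled`` column (none of these names come from this repository); as the docstring notes, this is PostgreSQL-specific and needs SQLAlchemy 1.0 or later:

    from alembic import op


    def upgrade():
        # Plain-string elements follow the ExcludeConstraint rules quoted
        # above: quoting must be applied manually where identifiers need it.
        op.create_exclude_constraint(
            "excl_booking_period",
            "booking",
            ("period", "&&"),
            where="NOT cancelled",
        )


    def downgrade():
        op.drop_constraint("excl_booking_period", "booking")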
+ + +def _exclude_constraint(constraint, autogen_context, alter): + opts = [] + + has_batch = autogen_context._has_batch + + if constraint.deferrable: + opts.append(("deferrable", str(constraint.deferrable))) + if constraint.initially: + opts.append(("initially", str(constraint.initially))) + if constraint.using: + opts.append(("using", str(constraint.using))) + if not has_batch and alter and constraint.table.schema: + opts.append(("schema", render._ident(constraint.table.schema))) + if not alter and constraint.name: + opts.append( + ("name", render._render_gen_name(autogen_context, constraint.name)) + ) + + if alter: + args = [ + repr(render._render_gen_name(autogen_context, constraint.name)) + ] + if not has_batch: + args += [repr(render._ident(constraint.table.name))] + args.extend( + [ + "(%s, %r)" + % ( + _render_potential_column(sqltext, autogen_context), + opstring, + ) + for sqltext, name, opstring in constraint._render_exprs + ] + ) + if constraint.where is not None: + args.append( + "where=%s" + % render._render_potential_expr( + constraint.where, autogen_context + ) + ) + args.extend(["%s=%r" % (k, v) for k, v in opts]) + return "%(prefix)screate_exclude_constraint(%(args)s)" % { + "prefix": render._alembic_autogenerate_prefix(autogen_context), + "args": ", ".join(args), + } + else: + args = [ + "(%s, %r)" + % (_render_potential_column(sqltext, autogen_context), opstring) + for sqltext, name, opstring in constraint._render_exprs + ] + if constraint.where is not None: + args.append( + "where=%s" + % render._render_potential_expr( + constraint.where, autogen_context + ) + ) + args.extend(["%s=%r" % (k, v) for k, v in opts]) + return "%(prefix)sExcludeConstraint(%(args)s)" % { + "prefix": _postgresql_autogenerate_prefix(autogen_context), + "args": ", ".join(args), + } + + +def _render_potential_column(value, autogen_context): + if isinstance(value, ColumnClause): + template = "%(prefix)scolumn(%(name)r)" + + return template % { + "prefix": render._sqlalchemy_autogenerate_prefix(autogen_context), + "name": value.name, + } + + else: + return render._render_potential_expr( + value, autogen_context, wrap_in_text=False + ) diff --git a/py3/lib/python3.6/site-packages/alembic/ddl/sqlite.py b/py3/lib/python3.6/site-packages/alembic/ddl/sqlite.py new file mode 100644 index 0000000..95f814a --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/ddl/sqlite.py @@ -0,0 +1,156 @@ +import re + +from .impl import DefaultImpl +from .. import util + + +class SQLiteImpl(DefaultImpl): + __dialect__ = "sqlite" + + transactional_ddl = False + """SQLite supports transactional DDL, but pysqlite does not: + see: http://bugs.python.org/issue10740 + """ + + def requires_recreate_in_batch(self, batch_op): + """Return True if the given :class:`.BatchOperationsImpl` + would need the table to be recreated and copied in order to + proceed. + + Normally, only returns True on SQLite when operations other + than add_column are present. 
+ + """ + for op in batch_op.batch: + if op[0] not in ("add_column", "create_index", "drop_index"): + return True + else: + return False + + def add_constraint(self, const): + # attempt to distinguish between an + # auto-gen constraint and an explicit one + if const._create_rule is None: + raise NotImplementedError( + "No support for ALTER of constraints in SQLite dialect" + ) + elif const._create_rule(self): + util.warn( + "Skipping unsupported ALTER for " + "creation of implicit constraint" + ) + + def drop_constraint(self, const): + if const._create_rule is None: + raise NotImplementedError( + "No support for ALTER of constraints in SQLite dialect" + ) + + def compare_server_default( + self, + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_inspector_default, + ): + + if rendered_metadata_default is not None: + rendered_metadata_default = re.sub( + r"^\((.+)\)$", r"\1", rendered_metadata_default + ) + + rendered_metadata_default = re.sub( + r"^\"?'(.+)'\"?$", r"\1", rendered_metadata_default + ) + + if rendered_inspector_default is not None: + rendered_inspector_default = re.sub( + r"^\"?'(.+)'\"?$", r"\1", rendered_inspector_default + ) + + return rendered_inspector_default != rendered_metadata_default + + def correct_for_autogen_constraints( + self, + conn_unique_constraints, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + ): + + if util.sqla_100: + return + + # adjustments to accommodate for SQLite unnamed unique constraints + # not being reported from the backend; this was updated in + # SQLA 1.0. + + def uq_sig(uq): + return tuple(sorted(uq.columns.keys())) + + conn_unique_sigs = set(uq_sig(uq) for uq in conn_unique_constraints) + + for idx in list(metadata_unique_constraints): + # SQLite backend can't report on unnamed UNIQUE constraints, + # so remove these, unless we see an exact signature match + if idx.name is None and uq_sig(idx) not in conn_unique_sigs: + metadata_unique_constraints.remove(idx) + + def _guess_if_default_is_unparenthesized_sql_expr(self, expr): + """Determine if a server default is a SQL expression or a constant. + + There are too many assertions that expect server defaults to round-trip + identically without parenthesis added so we will add parens only in + very specific cases. 
+ + """ + if not expr: + return False + elif re.match(r"^[0-9\.]$", expr): + return False + elif re.match(r"^'.+'$", expr): + return False + elif re.match(r"^\(.+\)$", expr): + return False + else: + return True + + def autogen_column_reflect(self, inspector, table, column_info): + # SQLite expression defaults require parenthesis when sent + # as DDL + if self._guess_if_default_is_unparenthesized_sql_expr( + column_info.get("default", None) + ): + column_info["default"] = "(%s)" % (column_info["default"],) + + def render_ddl_sql_expr(self, expr, is_server_default=False, **kw): + # SQLite expression defaults require parenthesis when sent + # as DDL + str_expr = super(SQLiteImpl, self).render_ddl_sql_expr( + expr, is_server_default=is_server_default, **kw + ) + + if ( + is_server_default + and self._guess_if_default_is_unparenthesized_sql_expr(str_expr) + ): + str_expr = "(%s)" % (str_expr,) + return str_expr + + +# @compiles(AddColumn, 'sqlite') +# def visit_add_column(element, compiler, **kw): +# return "%s %s" % ( +# alter_table(compiler, element.table_name, element.schema), +# add_column(compiler, element.column, **kw) +# ) + + +# def add_column(compiler, column, **kw): +# text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw) +# need to modify SQLAlchemy so that the CHECK associated with a Boolean +# or Enum gets placed as part of the column constraints, not the Table +# see ticket 98 +# for const in column.constraints: +# text += compiler.process(AddConstraint(const)) +# return text diff --git a/py3/lib/python3.6/site-packages/alembic/op.py b/py3/lib/python3.6/site-packages/alembic/op.py new file mode 100644 index 0000000..f3f5fac --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/op.py @@ -0,0 +1,5 @@ +from .operations.base import Operations + +# create proxy functions for +# each method on the Operations class. +Operations.create_module_class_proxy(globals(), locals()) diff --git a/py3/lib/python3.6/site-packages/alembic/operations/__init__.py b/py3/lib/python3.6/site-packages/alembic/operations/__init__.py new file mode 100644 index 0000000..dc2d3a4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/operations/__init__.py @@ -0,0 +1,7 @@ +from . import toimpl # noqa +from .base import BatchOperations +from .base import Operations +from .ops import MigrateOperation + + +__all__ = ["Operations", "BatchOperations", "MigrateOperation"] diff --git a/py3/lib/python3.6/site-packages/alembic/operations/base.py b/py3/lib/python3.6/site-packages/alembic/operations/base.py new file mode 100644 index 0000000..a4adf15 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/operations/base.py @@ -0,0 +1,473 @@ +from contextlib import contextmanager +import textwrap + +from . import batch +from . import schemaobj +from .. import util +from ..util import sqla_compat +from ..util.compat import exec_ +from ..util.compat import inspect_formatargspec +from ..util.compat import inspect_getargspec + +__all__ = ("Operations", "BatchOperations") + +try: + from sqlalchemy.sql.naming import conv +except: + conv = None + + +class Operations(util.ModuleClsProxy): + + """Define high level migration operations. + + Each operation corresponds to some schema migration operation, + executed against a particular :class:`.MigrationContext` + which in turn represents connectivity to a database, + or a file output stream. 
+ + While :class:`.Operations` is normally configured as + part of the :meth:`.EnvironmentContext.run_migrations` + method called from an ``env.py`` script, a standalone + :class:`.Operations` instance can be + made for use cases external to regular Alembic + migrations by passing in a :class:`.MigrationContext`:: + + from alembic.migration import MigrationContext + from alembic.operations import Operations + + conn = myengine.connect() + ctx = MigrationContext.configure(conn) + op = Operations(ctx) + + op.alter_column("t", "c", nullable=True) + + Note that as of 0.8, most of the methods on this class are produced + dynamically using the :meth:`.Operations.register_operation` + method. + + """ + + _to_impl = util.Dispatcher() + + def __init__(self, migration_context, impl=None): + """Construct a new :class:`.Operations` + + :param migration_context: a :class:`.MigrationContext` + instance. + + """ + self.migration_context = migration_context + if impl is None: + self.impl = migration_context.impl + else: + self.impl = impl + + self.schema_obj = schemaobj.SchemaObjects(migration_context) + + @classmethod + def register_operation(cls, name, sourcename=None): + """Register a new operation for this class. + + This method is normally used to add new operations + to the :class:`.Operations` class, and possibly the + :class:`.BatchOperations` class as well. All Alembic migration + operations are implemented via this system, however the system + is also available as a public API to facilitate adding custom + operations. + + .. versionadded:: 0.8.0 + + .. seealso:: + + :ref:`operation_plugins` + + + """ + + def register(op_cls): + if sourcename is None: + fn = getattr(op_cls, name) + source_name = fn.__name__ + else: + fn = getattr(op_cls, sourcename) + source_name = fn.__name__ + + spec = inspect_getargspec(fn) + + name_args = spec[0] + assert name_args[0:2] == ["cls", "operations"] + + name_args[0:2] = ["self"] + + args = inspect_formatargspec(*spec) + num_defaults = len(spec[3]) if spec[3] else 0 + if num_defaults: + defaulted_vals = name_args[0 - num_defaults :] + else: + defaulted_vals = () + + apply_kw = inspect_formatargspec( + name_args, + spec[1], + spec[2], + defaulted_vals, + formatvalue=lambda x: "=" + x, + ) + + func_text = textwrap.dedent( + """\ + def %(name)s%(args)s: + %(doc)r + return op_cls.%(source_name)s%(apply_kw)s + """ + % { + "name": name, + "source_name": source_name, + "args": args, + "apply_kw": apply_kw, + "doc": fn.__doc__, + "meth": fn.__name__, + } + ) + globals_ = {"op_cls": op_cls} + lcl = {} + exec_(func_text, globals_, lcl) + setattr(cls, name, lcl[name]) + fn.__func__.__doc__ = ( + "This method is proxied on " + "the :class:`.%s` class, via the :meth:`.%s.%s` method." + % (cls.__name__, cls.__name__, name) + ) + if hasattr(fn, "_legacy_translations"): + lcl[name]._legacy_translations = fn._legacy_translations + return op_cls + + return register + + @classmethod + def implementation_for(cls, op_cls): + """Register an implementation for a given :class:`.MigrateOperation`. + + This is part of the operation extensibility API. + + .. 
seealso:: + + :ref:`operation_plugins` - example of use + + """ + + def decorate(fn): + cls._to_impl.dispatch_for(op_cls)(fn) + return fn + + return decorate + + @classmethod + @contextmanager + def context(cls, migration_context): + op = Operations(migration_context) + op._install_proxy() + yield op + op._remove_proxy() + + @contextmanager + def batch_alter_table( + self, + table_name, + schema=None, + recreate="auto", + copy_from=None, + table_args=(), + table_kwargs=util.immutabledict(), + reflect_args=(), + reflect_kwargs=util.immutabledict(), + naming_convention=None, + ): + """Invoke a series of per-table migrations in batch. + + Batch mode allows a series of operations specific to a table + to be syntactically grouped together, and allows for alternate + modes of table migration, in particular the "recreate" style of + migration required by SQLite. + + "recreate" style is as follows: + + 1. A new table is created with the new specification, based on the + migration directives within the batch, using a temporary name. + + 2. the data copied from the existing table to the new table. + + 3. the existing table is dropped. + + 4. the new table is renamed to the existing table name. + + The directive by default will only use "recreate" style on the + SQLite backend, and only if directives are present which require + this form, e.g. anything other than ``add_column()``. The batch + operation on other backends will proceed using standard ALTER TABLE + operations. + + The method is used as a context manager, which returns an instance + of :class:`.BatchOperations`; this object is the same as + :class:`.Operations` except that table names and schema names + are omitted. E.g.:: + + with op.batch_alter_table("some_table") as batch_op: + batch_op.add_column(Column('foo', Integer)) + batch_op.drop_column('bar') + + The operations within the context manager are invoked at once + when the context is ended. When run against SQLite, if the + migrations include operations not supported by SQLite's ALTER TABLE, + the entire table will be copied to a new one with the new + specification, moving all data across as well. + + The copy operation by default uses reflection to retrieve the current + structure of the table, and therefore :meth:`.batch_alter_table` + in this mode requires that the migration is run in "online" mode. + The ``copy_from`` parameter may be passed which refers to an existing + :class:`.Table` object, which will bypass this reflection step. + + .. note:: The table copy operation will currently not copy + CHECK constraints, and may not copy UNIQUE constraints that are + unnamed, as is possible on SQLite. See the section + :ref:`sqlite_batch_constraints` for workarounds. + + :param table_name: name of table + :param schema: optional schema name. + :param recreate: under what circumstances the table should be + recreated. At its default of ``"auto"``, the SQLite dialect will + recreate the table if any operations other than ``add_column()``, + ``create_index()``, or ``drop_index()`` are + present. Other options include ``"always"`` and ``"never"``. + :param copy_from: optional :class:`~sqlalchemy.schema.Table` object + that will act as the structure of the table being copied. If omitted, + table reflection is used to retrieve the structure of the table. + + .. versionadded:: 0.7.6 Fully implemented the + :paramref:`~.Operations.batch_alter_table.copy_from` + parameter. + + .. 
seealso:: + + :ref:`batch_offline_mode` + + :paramref:`~.Operations.batch_alter_table.reflect_args` + + :paramref:`~.Operations.batch_alter_table.reflect_kwargs` + + :param reflect_args: a sequence of additional positional arguments that + will be applied to the table structure being reflected / copied; + this may be used to pass column and constraint overrides to the + table that will be reflected, in lieu of passing the whole + :class:`~sqlalchemy.schema.Table` using + :paramref:`~.Operations.batch_alter_table.copy_from`. + + .. versionadded:: 0.7.1 + + :param reflect_kwargs: a dictionary of additional keyword arguments + that will be applied to the table structure being copied; this may be + used to pass additional table and reflection options to the table that + will be reflected, in lieu of passing the whole + :class:`~sqlalchemy.schema.Table` using + :paramref:`~.Operations.batch_alter_table.copy_from`. + + .. versionadded:: 0.7.1 + + :param table_args: a sequence of additional positional arguments that + will be applied to the new :class:`~sqlalchemy.schema.Table` when + created, in addition to those copied from the source table. + This may be used to provide additional constraints such as CHECK + constraints that may not be reflected. + :param table_kwargs: a dictionary of additional keyword arguments + that will be applied to the new :class:`~sqlalchemy.schema.Table` + when created, in addition to those copied from the source table. + This may be used to provide for additional table options that may + not be reflected. + + .. versionadded:: 0.7.0 + + :param naming_convention: a naming convention dictionary of the form + described at :ref:`autogen_naming_conventions` which will be applied + to the :class:`~sqlalchemy.schema.MetaData` during the reflection + process. This is typically required if one wants to drop SQLite + constraints, as these constraints will not have names when + reflected on this backend. Requires SQLAlchemy **0.9.4** or greater. + + .. seealso:: + + :ref:`dropping_sqlite_foreign_keys` + + .. versionadded:: 0.7.1 + + .. note:: batch mode requires SQLAlchemy 0.8 or above. + + .. seealso:: + + :ref:`batch_migrations` + + """ + impl = batch.BatchOperationsImpl( + self, + table_name, + schema, + recreate, + copy_from, + table_args, + table_kwargs, + reflect_args, + reflect_kwargs, + naming_convention, + ) + batch_op = BatchOperations(self.migration_context, impl=impl) + yield batch_op + impl.flush() + + def get_context(self): + """Return the :class:`.MigrationContext` object that's + currently in use. + + """ + + return self.migration_context + + def invoke(self, operation): + """Given a :class:`.MigrateOperation`, invoke it in terms of + this :class:`.Operations` instance. + + .. versionadded:: 0.8.0 + + """ + fn = self._to_impl.dispatch( + operation, self.migration_context.impl.__dialect__ + ) + return fn(self, operation) + + def f(self, name): + """Indicate a string name that has already had a naming convention + applied to it. + + This feature combines with the SQLAlchemy ``naming_convention`` feature + to disambiguate constraint names that have already had naming + conventions applied to them, versus those that have not. This is + necessary in the case that the ``"%(constraint_name)s"`` token + is used within a naming convention, so that it can be identified + that this particular name should remain fixed. 
+ + If the :meth:`.Operations.f` is used on a constraint, the naming + convention will not take effect:: + + op.add_column('t', 'x', Boolean(name=op.f('ck_bool_t_x'))) + + Above, the CHECK constraint generated will have the name + ``ck_bool_t_x`` regardless of whether or not a naming convention is + in use. + + Alternatively, if a naming convention is in use, and 'f' is not used, + names will be converted along conventions. If the ``target_metadata`` + contains the naming convention + ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the + output of the following: + + op.add_column('t', 'x', Boolean(name='x')) + + will be:: + + CONSTRAINT ck_bool_t_x CHECK (x in (1, 0))) + + The function is rendered in the output of autogenerate when + a particular constraint name is already converted, for SQLAlchemy + version **0.9.4 and greater only**. Even though ``naming_convention`` + was introduced in 0.9.2, the string disambiguation service is new + as of 0.9.4. + + .. versionadded:: 0.6.4 + + """ + if conv: + return conv(name) + else: + raise NotImplementedError( + "op.f() feature requires SQLAlchemy 0.9.4 or greater." + ) + + def inline_literal(self, value, type_=None): + r"""Produce an 'inline literal' expression, suitable for + using in an INSERT, UPDATE, or DELETE statement. + + When using Alembic in "offline" mode, CRUD operations + aren't compatible with SQLAlchemy's default behavior surrounding + literal values, + which is that they are converted into bound values and passed + separately into the ``execute()`` method of the DBAPI cursor. + An offline SQL + script needs to have these rendered inline. While it should + always be noted that inline literal values are an **enormous** + security hole in an application that handles untrusted input, + a schema migration is not run in this context, so + literals are safe to render inline, with the caveat that + advanced types like dates may not be supported directly + by SQLAlchemy. + + See :meth:`.execute` for an example usage of + :meth:`.inline_literal`. + + The environment can also be configured to attempt to render + "literal" values inline automatically, for those simple types + that are supported by the dialect; see + :paramref:`.EnvironmentContext.configure.literal_binds` for this + more recently added feature. + + :param value: The value to render. Strings, integers, and simple + numerics should be supported. Other types like boolean, + dates, etc. may or may not be supported yet by various + backends. + :param type\_: optional - a :class:`sqlalchemy.types.TypeEngine` + subclass stating the type of this value. In SQLAlchemy + expressions, this is usually derived automatically + from the Python type of the value itself, as well as + based on the context in which the value is used. + + .. seealso:: + + :paramref:`.EnvironmentContext.configure.literal_binds` + + """ + return sqla_compat._literal_bindparam(None, value, type_=type_) + + def get_bind(self): + """Return the current 'bind'. + + Under normal circumstances, this is the + :class:`~sqlalchemy.engine.Connection` currently being used + to emit SQL to the database. + + In a SQL script context, this value is ``None``. [TODO: verify this] + + """ + return self.migration_context.impl.bind + + +class BatchOperations(Operations): + """Modifies the interface :class:`.Operations` for batch mode. + + This basically omits the ``table_name`` and ``schema`` parameters + from associated methods, as these are a given when running under batch + mode. + + .. 
seealso:: + + :meth:`.Operations.batch_alter_table` + + Note that as of 0.8, most of the methods on this class are produced + dynamically using the :meth:`.Operations.register_operation` + method. + + """ + + def _noop(self, operation): + raise NotImplementedError( + "The %s method does not apply to a batch table alter operation." + % operation + ) diff --git a/py3/lib/python3.6/site-packages/alembic/operations/batch.py b/py3/lib/python3.6/site-packages/alembic/operations/batch.py new file mode 100644 index 0000000..42db905 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/operations/batch.py @@ -0,0 +1,425 @@ +from sqlalchemy import cast +from sqlalchemy import CheckConstraint +from sqlalchemy import Column +from sqlalchemy import ForeignKeyConstraint +from sqlalchemy import Index +from sqlalchemy import MetaData +from sqlalchemy import PrimaryKeyConstraint +from sqlalchemy import schema as sql_schema +from sqlalchemy import select +from sqlalchemy import Table +from sqlalchemy import types as sqltypes +from sqlalchemy.events import SchemaEventTarget +from sqlalchemy.util import OrderedDict + +from ..util.sqla_compat import _columns_for_constraint +from ..util.sqla_compat import _fk_is_self_referential +from ..util.sqla_compat import _is_type_bound +from ..util.sqla_compat import _remove_column_from_collection + + +class BatchOperationsImpl(object): + def __init__( + self, + operations, + table_name, + schema, + recreate, + copy_from, + table_args, + table_kwargs, + reflect_args, + reflect_kwargs, + naming_convention, + ): + self.operations = operations + self.table_name = table_name + self.schema = schema + if recreate not in ("auto", "always", "never"): + raise ValueError( + "recreate may be one of 'auto', 'always', or 'never'." + ) + self.recreate = recreate + self.copy_from = copy_from + self.table_args = table_args + self.table_kwargs = dict(table_kwargs) + self.reflect_args = reflect_args + self.reflect_kwargs = dict(reflect_kwargs) + self.reflect_kwargs.setdefault( + "listeners", list(self.reflect_kwargs.get("listeners", ())) + ) + self.reflect_kwargs["listeners"].append( + ("column_reflect", operations.impl.autogen_column_reflect) + ) + self.naming_convention = naming_convention + self.batch = [] + + @property + def dialect(self): + return self.operations.impl.dialect + + @property + def impl(self): + return self.operations.impl + + def _should_recreate(self): + if self.recreate == "auto": + return self.operations.impl.requires_recreate_in_batch(self) + elif self.recreate == "always": + return True + else: + return False + + def flush(self): + should_recreate = self._should_recreate() + + if not should_recreate: + for opname, arg, kw in self.batch: + fn = getattr(self.operations.impl, opname) + fn(*arg, **kw) + else: + if self.naming_convention: + m1 = MetaData(naming_convention=self.naming_convention) + else: + m1 = MetaData() + + if self.copy_from is not None: + existing_table = self.copy_from + reflected = False + else: + existing_table = Table( + self.table_name, + m1, + schema=self.schema, + autoload=True, + autoload_with=self.operations.get_bind(), + *self.reflect_args, + **self.reflect_kwargs + ) + reflected = True + + batch_impl = ApplyBatchImpl( + existing_table, self.table_args, self.table_kwargs, reflected + ) + for opname, arg, kw in self.batch: + fn = getattr(batch_impl, opname) + fn(*arg, **kw) + + batch_impl._create(self.impl) + + def alter_column(self, *arg, **kw): + self.batch.append(("alter_column", arg, kw)) + + def add_column(self, *arg, **kw): + 
self.batch.append(("add_column", arg, kw)) + + def drop_column(self, *arg, **kw): + self.batch.append(("drop_column", arg, kw)) + + def add_constraint(self, const): + self.batch.append(("add_constraint", (const,), {})) + + def drop_constraint(self, const): + self.batch.append(("drop_constraint", (const,), {})) + + def rename_table(self, *arg, **kw): + self.batch.append(("rename_table", arg, kw)) + + def create_index(self, idx): + self.batch.append(("create_index", (idx,), {})) + + def drop_index(self, idx): + self.batch.append(("drop_index", (idx,), {})) + + def create_table(self, table): + raise NotImplementedError("Can't create table in batch mode") + + def drop_table(self, table): + raise NotImplementedError("Can't drop table in batch mode") + + +class ApplyBatchImpl(object): + def __init__(self, table, table_args, table_kwargs, reflected): + self.table = table # this is a Table object + self.table_args = table_args + self.table_kwargs = table_kwargs + self.temp_table_name = self._calc_temp_name(table.name) + self.new_table = None + self.column_transfers = OrderedDict( + (c.name, {"expr": c}) for c in self.table.c + ) + self.reflected = reflected + self._grab_table_elements() + + @classmethod + def _calc_temp_name(cls, tablename): + return ("_alembic_tmp_%s" % tablename)[0:50] + + def _grab_table_elements(self): + schema = self.table.schema + self.columns = OrderedDict() + for c in self.table.c: + c_copy = c.copy(schema=schema) + c_copy.unique = c_copy.index = False + # ensure that the type object was copied, + # as we may need to modify it in-place + if isinstance(c.type, SchemaEventTarget): + assert c_copy.type is not c.type + self.columns[c.name] = c_copy + self.named_constraints = {} + self.unnamed_constraints = [] + self.indexes = {} + self.new_indexes = {} + for const in self.table.constraints: + if _is_type_bound(const): + continue + elif self.reflected and isinstance(const, CheckConstraint): + # TODO: we are skipping reflected CheckConstraint because + # we have no way to determine _is_type_bound() for these. + pass + elif const.name: + self.named_constraints[const.name] = const + else: + self.unnamed_constraints.append(const) + + for idx in self.table.indexes: + self.indexes[idx.name] = idx + + for k in self.table.kwargs: + self.table_kwargs.setdefault(k, self.table.kwargs[k]) + + def _transfer_elements_to_new_table(self): + assert self.new_table is None, "Can only create new table once" + + m = MetaData() + schema = self.table.schema + + self.new_table = new_table = Table( + self.temp_table_name, + m, + *(list(self.columns.values()) + list(self.table_args)), + schema=schema, + **self.table_kwargs + ) + + for const in ( + list(self.named_constraints.values()) + self.unnamed_constraints + ): + + const_columns = set( + [c.key for c in _columns_for_constraint(const)] + ) + + if not const_columns.issubset(self.column_transfers): + continue + + if isinstance(const, ForeignKeyConstraint): + if _fk_is_self_referential(const): + # for self-referential constraint, refer to the + # *original* table name, and not _alembic_batch_temp. + # This is consistent with how we're handling + # FK constraints from other tables; we assume SQLite + # no foreign keys just keeps the names unchanged, so + # when we rename back, they match again. + const_copy = const.copy( + schema=schema, target_table=self.table + ) + else: + # "target_table" for ForeignKeyConstraint.copy() is + # only used if the FK is detected as being + # self-referential, which we are handling above. 
+ const_copy = const.copy(schema=schema) + else: + const_copy = const.copy(schema=schema, target_table=new_table) + if isinstance(const, ForeignKeyConstraint): + self._setup_referent(m, const) + new_table.append_constraint(const_copy) + + def _gather_indexes_from_both_tables(self): + idx = [] + idx.extend(self.indexes.values()) + for index in self.new_indexes.values(): + idx.append( + Index( + index.name, + unique=index.unique, + *[self.new_table.c[col] for col in index.columns.keys()], + **index.kwargs + ) + ) + return idx + + def _setup_referent(self, metadata, constraint): + spec = constraint.elements[0]._get_colspec() + parts = spec.split(".") + tname = parts[-2] + if len(parts) == 3: + referent_schema = parts[0] + else: + referent_schema = None + + if tname != self.temp_table_name: + key = sql_schema._get_table_key(tname, referent_schema) + if key in metadata.tables: + t = metadata.tables[key] + for elem in constraint.elements: + colname = elem._get_colspec().split(".")[-1] + if not t.c.contains_column(colname): + t.append_column(Column(colname, sqltypes.NULLTYPE)) + else: + Table( + tname, + metadata, + *[ + Column(n, sqltypes.NULLTYPE) + for n in [ + elem._get_colspec().split(".")[-1] + for elem in constraint.elements + ] + ], + schema=referent_schema + ) + + def _create(self, op_impl): + self._transfer_elements_to_new_table() + + op_impl.prep_table_for_batch(self.table) + op_impl.create_table(self.new_table) + + try: + op_impl._exec( + self.new_table.insert(inline=True).from_select( + list( + k + for k, transfer in self.column_transfers.items() + if "expr" in transfer + ), + select( + [ + transfer["expr"] + for transfer in self.column_transfers.values() + if "expr" in transfer + ] + ), + ) + ) + op_impl.drop_table(self.table) + except: + op_impl.drop_table(self.new_table) + raise + else: + op_impl.rename_table( + self.temp_table_name, self.table.name, schema=self.table.schema + ) + self.new_table.name = self.table.name + try: + for idx in self._gather_indexes_from_both_tables(): + op_impl.create_index(idx) + finally: + self.new_table.name = self.temp_table_name + + def alter_column( + self, + table_name, + column_name, + nullable=None, + server_default=False, + name=None, + type_=None, + autoincrement=None, + **kw + ): + existing = self.columns[column_name] + existing_transfer = self.column_transfers[column_name] + if name is not None and name != column_name: + # note that we don't change '.key' - we keep referring + # to the renamed column by its old key in _create(). neat! + existing.name = name + existing_transfer["name"] = name + + if type_ is not None: + type_ = sqltypes.to_instance(type_) + # old type is being discarded so turn off eventing + # rules. Alternatively we can + # erase the events set up by this type, but this is simpler. 
+ # we also ignore the drop_constraint that will come here from + # Operations.implementation_for(alter_column) + if isinstance(existing.type, SchemaEventTarget): + existing.type._create_events = ( + existing.type.create_constraint + ) = False + + if existing.type._type_affinity is not type_._type_affinity: + existing_transfer["expr"] = cast( + existing_transfer["expr"], type_ + ) + + existing.type = type_ + + # we *dont* however set events for the new type, because + # alter_column is invoked from + # Operations.implementation_for(alter_column) which already + # will emit an add_constraint() + + if nullable is not None: + existing.nullable = nullable + if server_default is not False: + if server_default is None: + existing.server_default = None + else: + sql_schema.DefaultClause(server_default)._set_parent(existing) + if autoincrement is not None: + existing.autoincrement = bool(autoincrement) + + def add_column(self, table_name, column, **kw): + # we copy the column because operations.add_column() + # gives us a Column that is part of a Table already. + self.columns[column.name] = column.copy(schema=self.table.schema) + self.column_transfers[column.name] = {} + + def drop_column(self, table_name, column, **kw): + if column.name in self.table.primary_key.columns: + _remove_column_from_collection( + self.table.primary_key.columns, column + ) + del self.columns[column.name] + del self.column_transfers[column.name] + + def add_constraint(self, const): + if not const.name: + raise ValueError("Constraint must have a name") + if isinstance(const, sql_schema.PrimaryKeyConstraint): + if self.table.primary_key in self.unnamed_constraints: + self.unnamed_constraints.remove(self.table.primary_key) + + self.named_constraints[const.name] = const + + def drop_constraint(self, const): + if not const.name: + raise ValueError("Constraint must have a name") + try: + const = self.named_constraints.pop(const.name) + except KeyError: + if _is_type_bound(const): + # type-bound constraints are only included in the new + # table via their type object in any case, so ignore the + # drop_constraint() that comes here via the + # Operations.implementation_for(alter_column) + return + raise ValueError("No such constraint: '%s'" % const.name) + else: + if isinstance(const, PrimaryKeyConstraint): + for col in const.columns: + self.columns[col.name].primary_key = False + + def create_index(self, idx): + self.new_indexes[idx.name] = idx + + def drop_index(self, idx): + try: + del self.indexes[idx.name] + except KeyError: + raise ValueError("No such index: '%s'" % idx.name) + + def rename_table(self, *arg, **kw): + raise NotImplementedError("TODO") diff --git a/py3/lib/python3.6/site-packages/alembic/operations/ops.py b/py3/lib/python3.6/site-packages/alembic/operations/ops.py new file mode 100644 index 0000000..c3eb469 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/operations/ops.py @@ -0,0 +1,2435 @@ +import re + +from sqlalchemy.types import NULLTYPE + +from . import schemaobj +from .base import BatchOperations +from .base import Operations +from .. import util +from ..util import sqla_compat + + +class MigrateOperation(object): + """base class for migration command and organization objects. + + This system is part of the operation extensibility API. + + .. versionadded:: 0.8.0 + + .. 
seealso:: + + :ref:`operation_objects` + + :ref:`operation_plugins` + + :ref:`customizing_revision` + + """ + + @util.memoized_property + def info(self): + """A dictionary that may be used to store arbitrary information + along with this :class:`.MigrateOperation` object. + + """ + return {} + + +class AddConstraintOp(MigrateOperation): + """Represent an add constraint operation.""" + + add_constraint_ops = util.Dispatcher() + + @property + def constraint_type(self): + raise NotImplementedError() + + @classmethod + def register_add_constraint(cls, type_): + def go(klass): + cls.add_constraint_ops.dispatch_for(type_)(klass.from_constraint) + return klass + + return go + + @classmethod + def from_constraint(cls, constraint): + return cls.add_constraint_ops.dispatch(constraint.__visit_name__)( + constraint + ) + + def reverse(self): + return DropConstraintOp.from_constraint(self.to_constraint()) + + def to_diff_tuple(self): + return ("add_constraint", self.to_constraint()) + + +@Operations.register_operation("drop_constraint") +@BatchOperations.register_operation("drop_constraint", "batch_drop_constraint") +class DropConstraintOp(MigrateOperation): + """Represent a drop constraint operation.""" + + def __init__( + self, + constraint_name, + table_name, + type_=None, + schema=None, + _orig_constraint=None, + ): + self.constraint_name = constraint_name + self.table_name = table_name + self.constraint_type = type_ + self.schema = schema + self._orig_constraint = _orig_constraint + + def reverse(self): + if self._orig_constraint is None: + raise ValueError( + "operation is not reversible; " + "original constraint is not present" + ) + return AddConstraintOp.from_constraint(self._orig_constraint) + + def to_diff_tuple(self): + if self.constraint_type == "foreignkey": + return ("remove_fk", self.to_constraint()) + else: + return ("remove_constraint", self.to_constraint()) + + @classmethod + def from_constraint(cls, constraint): + types = { + "unique_constraint": "unique", + "foreign_key_constraint": "foreignkey", + "primary_key_constraint": "primary", + "check_constraint": "check", + "column_check_constraint": "check", + } + + constraint_table = sqla_compat._table_for_constraint(constraint) + return cls( + constraint.name, + constraint_table.name, + schema=constraint_table.schema, + type_=types[constraint.__visit_name__], + _orig_constraint=constraint, + ) + + def to_constraint(self): + if self._orig_constraint is not None: + return self._orig_constraint + else: + raise ValueError( + "constraint cannot be produced; " + "original constraint is not present" + ) + + @classmethod + @util._with_legacy_names([("type", "type_"), ("name", "constraint_name")]) + def drop_constraint( + cls, operations, constraint_name, table_name, type_=None, schema=None + ): + r"""Drop a constraint of the given name, typically via DROP CONSTRAINT. + + :param constraint_name: name of the constraint. + :param table_name: table name. + :param type\_: optional, required on MySQL. can be + 'foreignkey', 'primary', 'unique', or 'check'. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + .. versionadded:: 0.7.0 'schema' can now accept a + :class:`~sqlalchemy.sql.elements.quoted_name` construct. + + .. 
versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> constraint_name + + """ + + op = cls(constraint_name, table_name, type_=type_, schema=schema) + return operations.invoke(op) + + @classmethod + def batch_drop_constraint(cls, operations, constraint_name, type_=None): + """Issue a "drop constraint" instruction using the + current batch migration context. + + The batch form of this call omits the ``table_name`` and ``schema`` + arguments from the call. + + .. seealso:: + + :meth:`.Operations.drop_constraint` + + .. versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> constraint_name + + """ + op = cls( + constraint_name, + operations.impl.table_name, + type_=type_, + schema=operations.impl.schema, + ) + return operations.invoke(op) + + +@Operations.register_operation("create_primary_key") +@BatchOperations.register_operation( + "create_primary_key", "batch_create_primary_key" +) +@AddConstraintOp.register_add_constraint("primary_key_constraint") +class CreatePrimaryKeyOp(AddConstraintOp): + """Represent a create primary key operation.""" + + constraint_type = "primarykey" + + def __init__( + self, + constraint_name, + table_name, + columns, + schema=None, + _orig_constraint=None, + **kw + ): + self.constraint_name = constraint_name + self.table_name = table_name + self.columns = columns + self.schema = schema + self._orig_constraint = _orig_constraint + self.kw = kw + + @classmethod + def from_constraint(cls, constraint): + constraint_table = sqla_compat._table_for_constraint(constraint) + + return cls( + constraint.name, + constraint_table.name, + constraint.columns, + schema=constraint_table.schema, + _orig_constraint=constraint, + ) + + def to_constraint(self, migration_context=None): + if self._orig_constraint is not None: + return self._orig_constraint + + schema_obj = schemaobj.SchemaObjects(migration_context) + return schema_obj.primary_key_constraint( + self.constraint_name, + self.table_name, + self.columns, + schema=self.schema, + ) + + @classmethod + @util._with_legacy_names( + [("name", "constraint_name"), ("cols", "columns")] + ) + def create_primary_key( + cls, operations, constraint_name, table_name, columns, schema=None + ): + """Issue a "create primary key" instruction using the current + migration context. + + e.g.:: + + from alembic import op + op.create_primary_key( + "pk_my_table", "my_table", + ["id", "version"] + ) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.PrimaryKeyConstraint` + object which it then associates with the + :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param name: Name of the primary key constraint. The name is necessary + so that an ALTER statement can be emitted. For setups that + use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions` + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param table_name: String name of the target table. + :param columns: a list of string column names to be applied to the + primary key constraint. + :param schema: Optional schema name to operate within. 
To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + .. versionadded:: 0.7.0 'schema' can now accept a + :class:`~sqlalchemy.sql.elements.quoted_name` construct. + + .. versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> constraint_name + * cols -> columns + + """ + op = cls(constraint_name, table_name, columns, schema) + return operations.invoke(op) + + @classmethod + def batch_create_primary_key(cls, operations, constraint_name, columns): + """Issue a "create primary key" instruction using the + current batch migration context. + + The batch form of this call omits the ``table_name`` and ``schema`` + arguments from the call. + + .. seealso:: + + :meth:`.Operations.create_primary_key` + + """ + op = cls( + constraint_name, + operations.impl.table_name, + columns, + schema=operations.impl.schema, + ) + return operations.invoke(op) + + +@Operations.register_operation("create_unique_constraint") +@BatchOperations.register_operation( + "create_unique_constraint", "batch_create_unique_constraint" +) +@AddConstraintOp.register_add_constraint("unique_constraint") +class CreateUniqueConstraintOp(AddConstraintOp): + """Represent a create unique constraint operation.""" + + constraint_type = "unique" + + def __init__( + self, + constraint_name, + table_name, + columns, + schema=None, + _orig_constraint=None, + **kw + ): + self.constraint_name = constraint_name + self.table_name = table_name + self.columns = columns + self.schema = schema + self._orig_constraint = _orig_constraint + self.kw = kw + + @classmethod + def from_constraint(cls, constraint): + constraint_table = sqla_compat._table_for_constraint(constraint) + + kw = {} + if constraint.deferrable: + kw["deferrable"] = constraint.deferrable + if constraint.initially: + kw["initially"] = constraint.initially + + return cls( + constraint.name, + constraint_table.name, + [c.name for c in constraint.columns], + schema=constraint_table.schema, + _orig_constraint=constraint, + **kw + ) + + def to_constraint(self, migration_context=None): + if self._orig_constraint is not None: + return self._orig_constraint + + schema_obj = schemaobj.SchemaObjects(migration_context) + return schema_obj.unique_constraint( + self.constraint_name, + self.table_name, + self.columns, + schema=self.schema, + **self.kw + ) + + @classmethod + @util._with_legacy_names( + [ + ("name", "constraint_name"), + ("source", "table_name"), + ("local_cols", "columns"), + ] + ) + def create_unique_constraint( + cls, + operations, + constraint_name, + table_name, + columns, + schema=None, + **kw + ): + """Issue a "create unique constraint" instruction using the + current migration context. + + e.g.:: + + from alembic import op + op.create_unique_constraint("uq_user_name", "user", ["name"]) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.UniqueConstraint` + object which it then associates with the + :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param name: Name of the unique constraint. The name is necessary + so that an ALTER statement can be emitted. 
For setups that + use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions`, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param table_name: String name of the source table. + :param columns: a list of string column names in the + source table. + :param deferrable: optional bool. If set, emit DEFERRABLE or + NOT DEFERRABLE when issuing DDL for this constraint. + :param initially: optional string. If set, emit INITIALLY + when issuing DDL for this constraint. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + .. versionadded:: 0.7.0 'schema' can now accept a + :class:`~sqlalchemy.sql.elements.quoted_name` construct. + + .. versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> constraint_name + * source -> table_name + * local_cols -> columns + + """ + + op = cls(constraint_name, table_name, columns, schema=schema, **kw) + return operations.invoke(op) + + @classmethod + @util._with_legacy_names([("name", "constraint_name")]) + def batch_create_unique_constraint( + cls, operations, constraint_name, columns, **kw + ): + """Issue a "create unique constraint" instruction using the + current batch migration context. + + The batch form of this call omits the ``source`` and ``schema`` + arguments from the call. + + .. seealso:: + + :meth:`.Operations.create_unique_constraint` + + .. versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> constraint_name + + """ + kw["schema"] = operations.impl.schema + op = cls(constraint_name, operations.impl.table_name, columns, **kw) + return operations.invoke(op) + + +@Operations.register_operation("create_foreign_key") +@BatchOperations.register_operation( + "create_foreign_key", "batch_create_foreign_key" +) +@AddConstraintOp.register_add_constraint("foreign_key_constraint") +class CreateForeignKeyOp(AddConstraintOp): + """Represent a create foreign key constraint operation.""" + + constraint_type = "foreignkey" + + def __init__( + self, + constraint_name, + source_table, + referent_table, + local_cols, + remote_cols, + _orig_constraint=None, + **kw + ): + self.constraint_name = constraint_name + self.source_table = source_table + self.referent_table = referent_table + self.local_cols = local_cols + self.remote_cols = remote_cols + self._orig_constraint = _orig_constraint + self.kw = kw + + def to_diff_tuple(self): + return ("add_fk", self.to_constraint()) + + @classmethod + def from_constraint(cls, constraint): + kw = {} + if constraint.onupdate: + kw["onupdate"] = constraint.onupdate + if constraint.ondelete: + kw["ondelete"] = constraint.ondelete + if constraint.initially: + kw["initially"] = constraint.initially + if constraint.deferrable: + kw["deferrable"] = constraint.deferrable + if constraint.use_alter: + kw["use_alter"] = constraint.use_alter + + ( + source_schema, + source_table, + source_columns, + target_schema, + target_table, + target_columns, + onupdate, + ondelete, + deferrable, + initially, + ) = sqla_compat._fk_spec(constraint) + + kw["source_schema"] = source_schema + kw["referent_schema"] = target_schema + + return cls( + constraint.name, + source_table, + target_table, + source_columns, + target_columns, + _orig_constraint=constraint, + **kw + ) + + def 
to_constraint(self, migration_context=None): + if self._orig_constraint is not None: + return self._orig_constraint + schema_obj = schemaobj.SchemaObjects(migration_context) + return schema_obj.foreign_key_constraint( + self.constraint_name, + self.source_table, + self.referent_table, + self.local_cols, + self.remote_cols, + **self.kw + ) + + @classmethod + @util._with_legacy_names( + [ + ("name", "constraint_name"), + ("source", "source_table"), + ("referent", "referent_table"), + ] + ) + def create_foreign_key( + cls, + operations, + constraint_name, + source_table, + referent_table, + local_cols, + remote_cols, + onupdate=None, + ondelete=None, + deferrable=None, + initially=None, + match=None, + source_schema=None, + referent_schema=None, + **dialect_kw + ): + """Issue a "create foreign key" instruction using the + current migration context. + + e.g.:: + + from alembic import op + op.create_foreign_key( + "fk_user_address", "address", + "user", ["user_id"], ["id"]) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.ForeignKeyConstraint` + object which it then associates with the + :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param name: Name of the foreign key constraint. The name is necessary + so that an ALTER statement can be emitted. For setups that + use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions`, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param source_table: String name of the source table. + :param referent_table: String name of the destination table. + :param local_cols: a list of string column names in the + source table. + :param remote_cols: a list of string column names in the + remote table. + :param onupdate: Optional string. If set, emit ON UPDATE when + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. + :param ondelete: Optional string. If set, emit ON DELETE when + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. + :param deferrable: optional bool. If set, emit DEFERRABLE or NOT + DEFERRABLE when issuing DDL for this constraint. + :param source_schema: Optional schema name of the source table. + :param referent_schema: Optional schema name of the destination table. + + .. 
versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> constraint_name + * source -> source_table + * referent -> referent_table + + """ + + op = cls( + constraint_name, + source_table, + referent_table, + local_cols, + remote_cols, + onupdate=onupdate, + ondelete=ondelete, + deferrable=deferrable, + source_schema=source_schema, + referent_schema=referent_schema, + initially=initially, + match=match, + **dialect_kw + ) + return operations.invoke(op) + + @classmethod + @util._with_legacy_names( + [("name", "constraint_name"), ("referent", "referent_table")] + ) + def batch_create_foreign_key( + cls, + operations, + constraint_name, + referent_table, + local_cols, + remote_cols, + referent_schema=None, + onupdate=None, + ondelete=None, + deferrable=None, + initially=None, + match=None, + **dialect_kw + ): + """Issue a "create foreign key" instruction using the + current batch migration context. + + The batch form of this call omits the ``source`` and ``source_schema`` + arguments from the call. + + e.g.:: + + with batch_alter_table("address") as batch_op: + batch_op.create_foreign_key( + "fk_user_address", + "user", ["user_id"], ["id"]) + + .. seealso:: + + :meth:`.Operations.create_foreign_key` + + .. versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> constraint_name + * referent -> referent_table + + """ + op = cls( + constraint_name, + operations.impl.table_name, + referent_table, + local_cols, + remote_cols, + onupdate=onupdate, + ondelete=ondelete, + deferrable=deferrable, + source_schema=operations.impl.schema, + referent_schema=referent_schema, + initially=initially, + match=match, + **dialect_kw + ) + return operations.invoke(op) + + +@Operations.register_operation("create_check_constraint") +@BatchOperations.register_operation( + "create_check_constraint", "batch_create_check_constraint" +) +@AddConstraintOp.register_add_constraint("check_constraint") +@AddConstraintOp.register_add_constraint("column_check_constraint") +class CreateCheckConstraintOp(AddConstraintOp): + """Represent a create check constraint operation.""" + + constraint_type = "check" + + def __init__( + self, + constraint_name, + table_name, + condition, + schema=None, + _orig_constraint=None, + **kw + ): + self.constraint_name = constraint_name + self.table_name = table_name + self.condition = condition + self.schema = schema + self._orig_constraint = _orig_constraint + self.kw = kw + + @classmethod + def from_constraint(cls, constraint): + constraint_table = sqla_compat._table_for_constraint(constraint) + + return cls( + constraint.name, + constraint_table.name, + constraint.sqltext, + schema=constraint_table.schema, + _orig_constraint=constraint, + ) + + def to_constraint(self, migration_context=None): + if self._orig_constraint is not None: + return self._orig_constraint + schema_obj = schemaobj.SchemaObjects(migration_context) + return schema_obj.check_constraint( + self.constraint_name, + self.table_name, + self.condition, + schema=self.schema, + **self.kw + ) + + @classmethod + @util._with_legacy_names( + [("name", "constraint_name"), ("source", "table_name")] + ) + def create_check_constraint( + cls, + operations, + constraint_name, + table_name, + condition, + schema=None, + **kw + ): + """Issue a "create check constraint" instruction using the + current migration context. 
+ + e.g.:: + + from alembic import op + from sqlalchemy.sql import column, func + + op.create_check_constraint( + "ck_user_name_len", + "user", + func.len(column('name')) > 5 + ) + + CHECK constraints are usually against a SQL expression, so ad-hoc + table metadata is usually needed. The function will convert the given + arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound + to an anonymous table in order to emit the CREATE statement. + + :param name: Name of the check constraint. The name is necessary + so that an ALTER statement can be emitted. For setups that + use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions`, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param table_name: String name of the source table. + :param condition: SQL expression that's the condition of the + constraint. Can be a string or SQLAlchemy expression language + structure. + :param deferrable: optional bool. If set, emit DEFERRABLE or + NOT DEFERRABLE when issuing DDL for this constraint. + :param initially: optional string. If set, emit INITIALLY + when issuing DDL for this constraint. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + .. versionadded:: 0.7.0 'schema' can now accept a + :class:`~sqlalchemy.sql.elements.quoted_name` construct. + + .. versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> constraint_name + * source -> table_name + + """ + op = cls(constraint_name, table_name, condition, schema=schema, **kw) + return operations.invoke(op) + + @classmethod + @util._with_legacy_names([("name", "constraint_name")]) + def batch_create_check_constraint( + cls, operations, constraint_name, condition, **kw + ): + """Issue a "create check constraint" instruction using the + current batch migration context. + + The batch form of this call omits the ``source`` and ``schema`` + arguments from the call. + + .. seealso:: + + :meth:`.Operations.create_check_constraint` + + .. 
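# --- editorial sketch (not part of the vendored diff) ----------------------
# A small, hypothetical example of create_check_constraint as documented
# above: the condition is an ad-hoc SQLAlchemy expression that gets bound to
# an anonymous table when the DDL is emitted.
from alembic import op
from sqlalchemy.sql import column
from sqlalchemy import Numeric

def upgrade():
    op.create_check_constraint(
        "ck_account_balance_nonnegative",
        "account",                         # hypothetical table name
        column("balance", Numeric) >= 0,   # hypothetical column/condition
    )
# ---------------------------------------------------------------------------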
versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> constraint_name + + """ + op = cls( + constraint_name, + operations.impl.table_name, + condition, + schema=operations.impl.schema, + **kw + ) + return operations.invoke(op) + + +@Operations.register_operation("create_index") +@BatchOperations.register_operation("create_index", "batch_create_index") +class CreateIndexOp(MigrateOperation): + """Represent a create index operation.""" + + def __init__( + self, + index_name, + table_name, + columns, + schema=None, + unique=False, + _orig_index=None, + **kw + ): + self.index_name = index_name + self.table_name = table_name + self.columns = columns + self.schema = schema + self.unique = unique + self.kw = kw + self._orig_index = _orig_index + + def reverse(self): + return DropIndexOp.from_index(self.to_index()) + + def to_diff_tuple(self): + return ("add_index", self.to_index()) + + @classmethod + def from_index(cls, index): + return cls( + index.name, + index.table.name, + sqla_compat._get_index_expressions(index), + schema=index.table.schema, + unique=index.unique, + _orig_index=index, + **index.kwargs + ) + + def to_index(self, migration_context=None): + if self._orig_index: + return self._orig_index + schema_obj = schemaobj.SchemaObjects(migration_context) + return schema_obj.index( + self.index_name, + self.table_name, + self.columns, + schema=self.schema, + unique=self.unique, + **self.kw + ) + + @classmethod + @util._with_legacy_names([("name", "index_name")]) + def create_index( + cls, + operations, + index_name, + table_name, + columns, + schema=None, + unique=False, + **kw + ): + r"""Issue a "create index" instruction using the current + migration context. + + e.g.:: + + from alembic import op + op.create_index('ik_test', 't1', ['foo', 'bar']) + + Functional indexes can be produced by using the + :func:`sqlalchemy.sql.expression.text` construct:: + + from alembic import op + from sqlalchemy import text + op.create_index('ik_test', 't1', [text('lower(foo)')]) + + .. versionadded:: 0.6.7 support for making use of the + :func:`~sqlalchemy.sql.expression.text` construct in + conjunction with + :meth:`.Operations.create_index` in + order to produce functional expressions within CREATE INDEX. + + :param index_name: name of the index. + :param table_name: name of the owning table. + :param columns: a list consisting of string column names and/or + :func:`~sqlalchemy.sql.expression.text` constructs. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + .. versionadded:: 0.7.0 'schema' can now accept a + :class:`~sqlalchemy.sql.elements.quoted_name` construct. + + :param unique: If True, create a unique index. + + :param quote: + Force quoting of this column's name on or off, corresponding + to ``True`` or ``False``. When left at its default + of ``None``, the column identifier will be quoted according to + whether the name is case sensitive (identifiers with at least one + upper case character are treated as case sensitive), or if it's a + reserved word. This flag is only needed to force quoting of a + reserved word which is not known by the SQLAlchemy dialect. + + :param \**kw: Additional keyword arguments not mentioned above are + dialect specific, and passed in the form + ``_``. 
+ See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. + + .. versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> index_name + + """ + op = cls( + index_name, table_name, columns, schema=schema, unique=unique, **kw + ) + return operations.invoke(op) + + @classmethod + def batch_create_index(cls, operations, index_name, columns, **kw): + """Issue a "create index" instruction using the + current batch migration context. + + .. seealso:: + + :meth:`.Operations.create_index` + + """ + + op = cls( + index_name, + operations.impl.table_name, + columns, + schema=operations.impl.schema, + **kw + ) + return operations.invoke(op) + + +@Operations.register_operation("drop_index") +@BatchOperations.register_operation("drop_index", "batch_drop_index") +class DropIndexOp(MigrateOperation): + """Represent a drop index operation.""" + + def __init__( + self, index_name, table_name=None, schema=None, _orig_index=None, **kw + ): + self.index_name = index_name + self.table_name = table_name + self.schema = schema + self._orig_index = _orig_index + self.kw = kw + + def to_diff_tuple(self): + return ("remove_index", self.to_index()) + + def reverse(self): + if self._orig_index is None: + raise ValueError( + "operation is not reversible; " "original index is not present" + ) + return CreateIndexOp.from_index(self._orig_index) + + @classmethod + def from_index(cls, index): + return cls( + index.name, + index.table.name, + schema=index.table.schema, + _orig_index=index, + **index.kwargs + ) + + def to_index(self, migration_context=None): + if self._orig_index is not None: + return self._orig_index + + schema_obj = schemaobj.SchemaObjects(migration_context) + + # need a dummy column name here since SQLAlchemy + # 0.7.6 and further raises on Index with no columns + return schema_obj.index( + self.index_name, + self.table_name, + ["x"], + schema=self.schema, + **self.kw + ) + + @classmethod + @util._with_legacy_names( + [("name", "index_name"), ("tablename", "table_name")] + ) + def drop_index( + cls, operations, index_name, table_name=None, schema=None, **kw + ): + r"""Issue a "drop index" instruction using the current + migration context. + + e.g.:: + + drop_index("accounts") + + :param index_name: name of the index. + :param table_name: name of the owning table. Some + backends such as Microsoft SQL Server require this. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + .. versionadded:: 0.7.0 'schema' can now accept a + :class:`~sqlalchemy.sql.elements.quoted_name` construct. + + :param \**kw: Additional keyword arguments not mentioned above are + dialect specific, and passed in the form + ``_``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. + + .. versionadded:: 0.9.5 Support for dialect-specific keyword + arguments for DROP INDEX + + .. versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> index_name + + """ + op = cls(index_name, table_name=table_name, schema=schema, **kw) + return operations.invoke(op) + + @classmethod + @util._with_legacy_names([("name", "index_name")]) + def batch_drop_index(cls, operations, index_name, **kw): + """Issue a "drop index" instruction using the + current batch migration context. + + .. 
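# --- editorial sketch (not part of the vendored diff) ----------------------
# A hypothetical create_index/drop_index pair based on the docstrings above.
# text() supplies a functional index expression; table_name is passed to
# drop_index because some backends (e.g. SQL Server) require it.
from alembic import op
from sqlalchemy import text

def upgrade():
    op.create_index(
        "ix_account_email_lower", "account",
        [text("lower(email)")], unique=True,
    )

def downgrade():
    op.drop_index("ix_account_email_lower", table_name="account")
# ---------------------------------------------------------------------------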
seealso:: + + :meth:`.Operations.drop_index` + + .. versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> index_name + + """ + + op = cls( + index_name, + table_name=operations.impl.table_name, + schema=operations.impl.schema, + **kw + ) + return operations.invoke(op) + + +@Operations.register_operation("create_table") +class CreateTableOp(MigrateOperation): + """Represent a create table operation.""" + + def __init__( + self, table_name, columns, schema=None, _orig_table=None, **kw + ): + self.table_name = table_name + self.columns = columns + self.schema = schema + self.kw = kw + self._orig_table = _orig_table + + def reverse(self): + return DropTableOp.from_table(self.to_table()) + + def to_diff_tuple(self): + return ("add_table", self.to_table()) + + @classmethod + def from_table(cls, table): + return cls( + table.name, + list(table.c) + list(table.constraints), + schema=table.schema, + _orig_table=table, + **table.kwargs + ) + + def to_table(self, migration_context=None): + if self._orig_table is not None: + return self._orig_table + schema_obj = schemaobj.SchemaObjects(migration_context) + + return schema_obj.table( + self.table_name, *self.columns, schema=self.schema, **self.kw + ) + + @classmethod + @util._with_legacy_names([("name", "table_name")]) + def create_table(cls, operations, table_name, *columns, **kw): + r"""Issue a "create table" instruction using the current migration + context. + + This directive receives an argument list similar to that of the + traditional :class:`sqlalchemy.schema.Table` construct, but without the + metadata:: + + from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column + from alembic import op + + op.create_table( + 'account', + Column('id', INTEGER, primary_key=True), + Column('name', VARCHAR(50), nullable=False), + Column('description', NVARCHAR(200)), + Column('timestamp', TIMESTAMP, server_default=func.now()) + ) + + Note that :meth:`.create_table` accepts + :class:`~sqlalchemy.schema.Column` + constructs directly from the SQLAlchemy library. In particular, + default values to be created on the database side are + specified using the ``server_default`` parameter, and not + ``default`` which only specifies Python-side defaults:: + + from alembic import op + from sqlalchemy import Column, TIMESTAMP, func + + # specify "DEFAULT NOW" along with the "timestamp" column + op.create_table('account', + Column('id', INTEGER, primary_key=True), + Column('timestamp', TIMESTAMP, server_default=func.now()) + ) + + The function also returns a newly created + :class:`~sqlalchemy.schema.Table` object, corresponding to the table + specification given, which is suitable for + immediate SQL operations, in particular + :meth:`.Operations.bulk_insert`:: + + from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column + from alembic import op + + account_table = op.create_table( + 'account', + Column('id', INTEGER, primary_key=True), + Column('name', VARCHAR(50), nullable=False), + Column('description', NVARCHAR(200)), + Column('timestamp', TIMESTAMP, server_default=func.now()) + ) + + op.bulk_insert( + account_table, + [ + {"name": "A1", "description": "account 1"}, + {"name": "A2", "description": "account 2"}, + ] + ) + + .. versionadded:: 0.7.0 + + :param table_name: Name of the table + :param \*columns: collection of :class:`~sqlalchemy.schema.Column` + objects within + the table, as well as optional :class:`~sqlalchemy.schema.Constraint` + objects + and :class:`~.sqlalchemy.schema.Index` objects. 
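# --- editorial sketch (not part of the vendored diff) ----------------------
# The Create*/Drop* classes above are plain objects that autogenerate builds
# and inverts; a rough illustration of from_table()/to_diff_tuple()/reverse()
# against a throwaway Table (all names hypothetical):
import sqlalchemy as sa
from alembic.operations import ops

meta = sa.MetaData()
widget = sa.Table(
    "widget", meta, sa.Column("id", sa.Integer, primary_key=True)
)

create_op = ops.CreateTableOp.from_table(widget)
print(create_op.to_diff_tuple()[0])   # "add_table"
drop_op = create_op.reverse()         # a DropTableOp for the same table
print(type(drop_op).__name__)         # "DropTableOp"
# ---------------------------------------------------------------------------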
+ :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + .. versionadded:: 0.7.0 'schema' can now accept a + :class:`~sqlalchemy.sql.elements.quoted_name` construct. + :param \**kw: Other keyword arguments are passed to the underlying + :class:`sqlalchemy.schema.Table` object created for the command. + + :return: the :class:`~sqlalchemy.schema.Table` object corresponding + to the parameters given. + + .. versionadded:: 0.7.0 - the :class:`~sqlalchemy.schema.Table` + object is returned. + + .. versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> table_name + + """ + op = cls(table_name, columns, **kw) + return operations.invoke(op) + + +@Operations.register_operation("drop_table") +class DropTableOp(MigrateOperation): + """Represent a drop table operation.""" + + def __init__( + self, table_name, schema=None, table_kw=None, _orig_table=None + ): + self.table_name = table_name + self.schema = schema + self.table_kw = table_kw or {} + self._orig_table = _orig_table + + def to_diff_tuple(self): + return ("remove_table", self.to_table()) + + def reverse(self): + if self._orig_table is None: + raise ValueError( + "operation is not reversible; " "original table is not present" + ) + return CreateTableOp.from_table(self._orig_table) + + @classmethod + def from_table(cls, table): + return cls(table.name, schema=table.schema, _orig_table=table) + + def to_table(self, migration_context=None): + if self._orig_table is not None: + return self._orig_table + schema_obj = schemaobj.SchemaObjects(migration_context) + return schema_obj.table( + self.table_name, schema=self.schema, **self.table_kw + ) + + @classmethod + @util._with_legacy_names([("name", "table_name")]) + def drop_table(cls, operations, table_name, schema=None, **kw): + r"""Issue a "drop table" instruction using the current + migration context. + + + e.g.:: + + drop_table("accounts") + + :param table_name: Name of the table + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + .. versionadded:: 0.7.0 'schema' can now accept a + :class:`~sqlalchemy.sql.elements.quoted_name` construct. + + :param \**kw: Other keyword arguments are passed to the underlying + :class:`sqlalchemy.schema.Table` object created for the command. + + .. versionchanged:: 0.8.0 The following positional argument names + have been changed: + + * name -> table_name + + """ + op = cls(table_name, schema=schema, table_kw=kw) + operations.invoke(op) + + +class AlterTableOp(MigrateOperation): + """Represent an alter table operation.""" + + def __init__(self, table_name, schema=None): + self.table_name = table_name + self.schema = schema + + +@Operations.register_operation("rename_table") +class RenameTableOp(AlterTableOp): + """Represent a rename table operation.""" + + def __init__(self, old_table_name, new_table_name, schema=None): + super(RenameTableOp, self).__init__(old_table_name, schema=schema) + self.new_table_name = new_table_name + + @classmethod + def rename_table( + cls, operations, old_table_name, new_table_name, schema=None + ): + """Emit an ALTER TABLE to rename a table. + + :param old_table_name: old name. + :param new_table_name: new name. + :param schema: Optional schema name to operate within. 
To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + .. versionadded:: 0.7.0 'schema' can now accept a + :class:`~sqlalchemy.sql.elements.quoted_name` construct. + + """ + op = cls(old_table_name, new_table_name, schema=schema) + return operations.invoke(op) + + +@Operations.register_operation("create_table_comment") +class CreateTableCommentOp(AlterTableOp): + """Represent a COMMENT ON `table` operation. + """ + + def __init__( + self, table_name, comment, schema=None, existing_comment=None + ): + self.table_name = table_name + self.comment = comment + self.existing_comment = existing_comment + self.schema = schema + + @classmethod + def create_table_comment( + cls, + operations, + table_name, + comment, + existing_comment=None, + schema=None, + ): + """Emit a COMMENT ON operation to set the comment for a table. + + .. versionadded:: 1.0.6 + + :param table_name: string name of the target table. + :param comment: string value of the comment being registered against + the specified table. + :param existing_comment: String value of a comment + already registered on the specified table, used within autogenerate + so that the operation is reversible, but not required for direct + use. + + .. seealso:: + + :meth:`.Operations.drop_table_comment` + + :paramref:`.Operations.alter_column.comment` + + """ + + op = cls( + table_name, + comment, + existing_comment=existing_comment, + schema=schema, + ) + return operations.invoke(op) + + def reverse(self): + """Reverses the COMMENT ON operation against a table. + """ + if self.existing_comment is None: + return DropTableCommentOp( + self.table_name, + existing_comment=self.comment, + schema=self.schema, + ) + else: + return CreateTableCommentOp( + self.table_name, + self.existing_comment, + existing_comment=self.comment, + schema=self.schema, + ) + + def to_table(self, migration_context=None): + schema_obj = schemaobj.SchemaObjects(migration_context) + + return schema_obj.table( + self.table_name, schema=self.schema, comment=self.comment + ) + + def to_diff_tuple(self): + return ("add_table_comment", self.to_table(), self.existing_comment) + + +@Operations.register_operation("drop_table_comment") +class DropTableCommentOp(AlterTableOp): + """Represent an operation to remove the comment from a table. + """ + + def __init__(self, table_name, schema=None, existing_comment=None): + self.table_name = table_name + self.existing_comment = existing_comment + self.schema = schema + + @classmethod + def drop_table_comment( + cls, operations, table_name, existing_comment=None, schema=None + ): + """Issue a "drop table comment" operation to + remove an existing comment set on a table. + + .. versionadded:: 1.0.6 + + :param table_name: string name of the target table. + :param existing_comment: An optional string value of a comment already + registered on the specified table. + + .. seealso:: + + :meth:`.Operations.create_table_comment` + + :paramref:`.Operations.alter_column.comment` + + """ + + op = cls(table_name, existing_comment=existing_comment, schema=schema) + return operations.invoke(op) + + def reverse(self): + """Reverses the COMMENT ON operation against a table. 
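# --- editorial sketch (not part of the vendored diff) ----------------------
# A hypothetical upgrade/downgrade pair combining rename_table with the
# table-comment operations defined above; passing existing_comment keeps the
# comment change reversible for autogenerate.
from alembic import op

COMMENT = "Primary customer accounts"   # illustrative text

def upgrade():
    op.rename_table("user_account", "account")
    op.create_table_comment("account", COMMENT, existing_comment=None)

def downgrade():
    op.drop_table_comment("account", existing_comment=COMMENT)
    op.rename_table("account", "user_account")
# ---------------------------------------------------------------------------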
+ """ + return CreateTableCommentOp( + self.table_name, self.existing_comment, schema=self.schema + ) + + def to_table(self, migration_context=None): + schema_obj = schemaobj.SchemaObjects(migration_context) + + return schema_obj.table(self.table_name, schema=self.schema) + + def to_diff_tuple(self): + return ("remove_table_comment", self.to_table()) + + +@Operations.register_operation("alter_column") +@BatchOperations.register_operation("alter_column", "batch_alter_column") +class AlterColumnOp(AlterTableOp): + """Represent an alter column operation.""" + + def __init__( + self, + table_name, + column_name, + schema=None, + existing_type=None, + existing_server_default=False, + existing_nullable=None, + existing_comment=None, + modify_nullable=None, + modify_comment=False, + modify_server_default=False, + modify_name=None, + modify_type=None, + **kw + ): + super(AlterColumnOp, self).__init__(table_name, schema=schema) + self.column_name = column_name + self.existing_type = existing_type + self.existing_server_default = existing_server_default + self.existing_nullable = existing_nullable + self.existing_comment = existing_comment + self.modify_nullable = modify_nullable + self.modify_comment = modify_comment + self.modify_server_default = modify_server_default + self.modify_name = modify_name + self.modify_type = modify_type + self.kw = kw + + def to_diff_tuple(self): + col_diff = [] + schema, tname, cname = self.schema, self.table_name, self.column_name + + if self.modify_type is not None: + col_diff.append( + ( + "modify_type", + schema, + tname, + cname, + { + "existing_nullable": self.existing_nullable, + "existing_server_default": ( + self.existing_server_default + ), + "existing_comment": self.existing_comment, + }, + self.existing_type, + self.modify_type, + ) + ) + + if self.modify_nullable is not None: + col_diff.append( + ( + "modify_nullable", + schema, + tname, + cname, + { + "existing_type": self.existing_type, + "existing_server_default": ( + self.existing_server_default + ), + "existing_comment": self.existing_comment, + }, + self.existing_nullable, + self.modify_nullable, + ) + ) + + if self.modify_server_default is not False: + col_diff.append( + ( + "modify_default", + schema, + tname, + cname, + { + "existing_nullable": self.existing_nullable, + "existing_type": self.existing_type, + "existing_comment": self.existing_comment, + }, + self.existing_server_default, + self.modify_server_default, + ) + ) + + if self.modify_comment is not False: + col_diff.append( + ( + "modify_comment", + schema, + tname, + cname, + { + "existing_nullable": self.existing_nullable, + "existing_type": self.existing_type, + "existing_server_default": ( + self.existing_server_default + ), + }, + self.existing_comment, + self.modify_comment, + ) + ) + + return col_diff + + def has_changes(self): + hc1 = ( + self.modify_nullable is not None + or self.modify_server_default is not False + or self.modify_type is not None + or self.modify_comment is not False + ) + if hc1: + return True + for kw in self.kw: + if kw.startswith("modify_"): + return True + else: + return False + + def reverse(self): + + kw = self.kw.copy() + kw["existing_type"] = self.existing_type + kw["existing_nullable"] = self.existing_nullable + kw["existing_server_default"] = self.existing_server_default + kw["existing_comment"] = self.existing_comment + if self.modify_type is not None: + kw["modify_type"] = self.modify_type + if self.modify_nullable is not None: + kw["modify_nullable"] = self.modify_nullable + if 
self.modify_server_default is not False: + kw["modify_server_default"] = self.modify_server_default + if self.modify_comment is not False: + kw["modify_comment"] = self.modify_comment + + # TODO: make this a little simpler + all_keys = set( + m.group(1) + for m in [re.match(r"^(?:existing_|modify_)(.+)$", k) for k in kw] + if m + ) + + for k in all_keys: + if "modify_%s" % k in kw: + swap = kw["existing_%s" % k] + kw["existing_%s" % k] = kw["modify_%s" % k] + kw["modify_%s" % k] = swap + + return self.__class__( + self.table_name, self.column_name, schema=self.schema, **kw + ) + + @classmethod + @util._with_legacy_names([("name", "new_column_name")]) + def alter_column( + cls, + operations, + table_name, + column_name, + nullable=None, + comment=False, + server_default=False, + new_column_name=None, + type_=None, + existing_type=None, + existing_server_default=False, + existing_nullable=None, + existing_comment=None, + schema=None, + **kw + ): + r"""Issue an "alter column" instruction using the + current migration context. + + Generally, only that aspect of the column which + is being changed, i.e. name, type, nullability, + default, needs to be specified. Multiple changes + can also be specified at once and the backend should + "do the right thing", emitting each change either + separately or together as the backend allows. + + MySQL has special requirements here, since MySQL + cannot ALTER a column without a full specification. + When producing MySQL-compatible migration files, + it is recommended that the ``existing_type``, + ``existing_server_default``, and ``existing_nullable`` + parameters be present, if not being altered. + + Type changes which are against the SQLAlchemy + "schema" types :class:`~sqlalchemy.types.Boolean` + and :class:`~sqlalchemy.types.Enum` may also + add or drop constraints which accompany those + types on backends that don't support them natively. + The ``existing_type`` argument is + used in this case to identify and remove a previous + constraint that was bound to the type object. + + :param table_name: string name of the target table. + :param column_name: string name of the target column, + as it exists before the operation begins. + :param nullable: Optional; specify ``True`` or ``False`` + to alter the column's nullability. + :param server_default: Optional; specify a string + SQL expression, :func:`~sqlalchemy.sql.expression.text`, + or :class:`~sqlalchemy.schema.DefaultClause` to indicate + an alteration to the column's default value. + Set to ``None`` to have the default removed. + :param comment: optional string text of a new comment to add to the + column. + + .. versionadded:: 1.0.6 + + :param new_column_name: Optional; specify a string name here to + indicate the new name within a column rename operation. + :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` + type object to specify a change to the column's type. + For SQLAlchemy types that also indicate a constraint (i.e. + :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), + the constraint is also generated. + :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column; + currently understood by the MySQL dialect. + :param existing_type: Optional; a + :class:`~sqlalchemy.types.TypeEngine` + type object to specify the previous type. This + is required for all MySQL column alter operations that + don't otherwise specify a new type, as well as for + when nullability is being changed on a SQL Server + column. 
It is also used if the type is a so-called + SQLlchemy "schema" type which may define a constraint (i.e. + :class:`~sqlalchemy.types.Boolean`, + :class:`~sqlalchemy.types.Enum`), + so that the constraint can be dropped. + :param existing_server_default: Optional; The existing + default value of the column. Required on MySQL if + an existing default is not being changed; else MySQL + removes the default. + :param existing_nullable: Optional; the existing nullability + of the column. Required on MySQL if the existing nullability + is not being changed; else MySQL sets this to NULL. + :param existing_autoincrement: Optional; the existing autoincrement + of the column. Used for MySQL's system of altering a column + that specifies ``AUTO_INCREMENT``. + :param existing_comment: string text of the existing comment on the + column to be maintained. Required on MySQL if the existing comment + on the column is not being changed. + + .. versionadded:: 1.0.6 + + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + .. versionadded:: 0.7.0 'schema' can now accept a + :class:`~sqlalchemy.sql.elements.quoted_name` construct. + + :param postgresql_using: String argument which will indicate a + SQL expression to render within the Postgresql-specific USING clause + within ALTER COLUMN. This string is taken directly as raw SQL which + must explicitly include any necessary quoting or escaping of tokens + within the expression. + + .. versionadded:: 0.8.8 + + """ + + alt = cls( + table_name, + column_name, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_comment=existing_comment, + modify_name=new_column_name, + modify_type=type_, + modify_server_default=server_default, + modify_nullable=nullable, + modify_comment=comment, + **kw + ) + + return operations.invoke(alt) + + @classmethod + def batch_alter_column( + cls, + operations, + column_name, + nullable=None, + comment=False, + server_default=False, + new_column_name=None, + type_=None, + existing_type=None, + existing_server_default=False, + existing_nullable=None, + existing_comment=None, + **kw + ): + """Issue an "alter column" instruction using the current + batch migration context. + + .. 
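# --- editorial sketch (not part of the vendored diff) ----------------------
# A hypothetical alter_column call carrying the existing_* parameters that
# MySQL needs when only part of the column definition changes (see the
# docstring above); the "account.amount" column and its prior definition are
# assumptions for illustration.
import sqlalchemy as sa
from alembic import op

def upgrade():
    op.alter_column(
        "account", "amount",
        type_=sa.Numeric(12, 2),
        nullable=True,
        existing_type=sa.Integer(),
        existing_nullable=False,
        existing_server_default=sa.text("0"),
    )
# ---------------------------------------------------------------------------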
seealso:: + + :meth:`.Operations.alter_column` + + """ + alt = cls( + operations.impl.table_name, + column_name, + schema=operations.impl.schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_comment=existing_comment, + modify_name=new_column_name, + modify_type=type_, + modify_server_default=server_default, + modify_nullable=nullable, + modify_comment=comment, + **kw + ) + + return operations.invoke(alt) + + +@Operations.register_operation("add_column") +@BatchOperations.register_operation("add_column", "batch_add_column") +class AddColumnOp(AlterTableOp): + """Represent an add column operation.""" + + def __init__(self, table_name, column, schema=None): + super(AddColumnOp, self).__init__(table_name, schema=schema) + self.column = column + + def reverse(self): + return DropColumnOp.from_column_and_tablename( + self.schema, self.table_name, self.column + ) + + def to_diff_tuple(self): + return ("add_column", self.schema, self.table_name, self.column) + + def to_column(self): + return self.column + + @classmethod + def from_column(cls, col): + return cls(col.table.name, col, schema=col.table.schema) + + @classmethod + def from_column_and_tablename(cls, schema, tname, col): + return cls(tname, col, schema=schema) + + @classmethod + def add_column(cls, operations, table_name, column, schema=None): + """Issue an "add column" instruction using the current + migration context. + + e.g.:: + + from alembic import op + from sqlalchemy import Column, String + + op.add_column('organization', + Column('name', String()) + ) + + The provided :class:`~sqlalchemy.schema.Column` object can also + specify a :class:`~sqlalchemy.schema.ForeignKey`, referencing + a remote table name. Alembic will automatically generate a stub + "referenced" table and emit a second ALTER statement in order + to add the constraint separately:: + + from alembic import op + from sqlalchemy import Column, INTEGER, ForeignKey + + op.add_column('organization', + Column('account_id', INTEGER, ForeignKey('accounts.id')) + ) + + Note that this statement uses the :class:`~sqlalchemy.schema.Column` + construct as is from the SQLAlchemy library. In particular, + default values to be created on the database side are + specified using the ``server_default`` parameter, and not + ``default`` which only specifies Python-side defaults:: + + from alembic import op + from sqlalchemy import Column, TIMESTAMP, func + + # specify "DEFAULT NOW" along with the column add + op.add_column('account', + Column('timestamp', TIMESTAMP, server_default=func.now()) + ) + + :param table_name: String name of the parent table. + :param column: a :class:`sqlalchemy.schema.Column` object + representing the new column. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + .. versionadded:: 0.7.0 'schema' can now accept a + :class:`~sqlalchemy.sql.elements.quoted_name` construct. + + + """ + + op = cls(table_name, column, schema=schema) + return operations.invoke(op) + + @classmethod + def batch_add_column(cls, operations, column): + """Issue an "add column" instruction using the current + batch migration context. + + .. 
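# --- editorial sketch (not part of the vendored diff) ----------------------
# The batch variants above omit table_name/schema because both come from the
# enclosing batch_alter_table() context; a hypothetical example:
import sqlalchemy as sa
from alembic import op

def upgrade():
    with op.batch_alter_table("account") as batch_op:
        batch_op.alter_column(
            "name",
            type_=sa.String(100),
            existing_type=sa.String(50),
            existing_nullable=False,
        )
        batch_op.add_column(sa.Column("nickname", sa.String(50)))
# ---------------------------------------------------------------------------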
seealso:: + + :meth:`.Operations.add_column` + + """ + op = cls( + operations.impl.table_name, column, schema=operations.impl.schema + ) + return operations.invoke(op) + + +@Operations.register_operation("drop_column") +@BatchOperations.register_operation("drop_column", "batch_drop_column") +class DropColumnOp(AlterTableOp): + """Represent a drop column operation.""" + + def __init__( + self, table_name, column_name, schema=None, _orig_column=None, **kw + ): + super(DropColumnOp, self).__init__(table_name, schema=schema) + self.column_name = column_name + self.kw = kw + self._orig_column = _orig_column + + def to_diff_tuple(self): + return ( + "remove_column", + self.schema, + self.table_name, + self.to_column(), + ) + + def reverse(self): + if self._orig_column is None: + raise ValueError( + "operation is not reversible; " + "original column is not present" + ) + + return AddColumnOp.from_column_and_tablename( + self.schema, self.table_name, self._orig_column + ) + + @classmethod + def from_column_and_tablename(cls, schema, tname, col): + return cls(tname, col.name, schema=schema, _orig_column=col) + + def to_column(self, migration_context=None): + if self._orig_column is not None: + return self._orig_column + schema_obj = schemaobj.SchemaObjects(migration_context) + return schema_obj.column(self.column_name, NULLTYPE) + + @classmethod + def drop_column( + cls, operations, table_name, column_name, schema=None, **kw + ): + """Issue a "drop column" instruction using the current + migration context. + + e.g.:: + + drop_column('organization', 'account_id') + + :param table_name: name of table + :param column_name: name of column + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + .. versionadded:: 0.7.0 'schema' can now accept a + :class:`~sqlalchemy.sql.elements.quoted_name` construct. + + :param mssql_drop_check: Optional boolean. When ``True``, on + Microsoft SQL Server only, first + drop the CHECK constraint on the column using a + SQL-script-compatible + block that selects into a @variable from sys.check_constraints, + then exec's a separate DROP CONSTRAINT for that constraint. + :param mssql_drop_default: Optional boolean. When ``True``, on + Microsoft SQL Server only, first + drop the DEFAULT constraint on the column using a + SQL-script-compatible + block that selects into a @variable from sys.default_constraints, + then exec's a separate DROP CONSTRAINT for that default. + :param mssql_drop_foreign_key: Optional boolean. When ``True``, on + Microsoft SQL Server only, first + drop a single FOREIGN KEY constraint on the column using a + SQL-script-compatible + block that selects into a @variable from + sys.foreign_keys/sys.foreign_key_columns, + then exec's a separate DROP CONSTRAINT for that default. Only + works if the column has exactly one FK constraint which refers to + it, at the moment. + + .. versionadded:: 0.6.2 + + """ + + op = cls(table_name, column_name, schema=schema, **kw) + return operations.invoke(op) + + @classmethod + def batch_drop_column(cls, operations, column_name, **kw): + """Issue a "drop column" instruction using the current + batch migration context. + + .. 
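# --- editorial sketch (not part of the vendored diff) ----------------------
# A hypothetical drop_column call using one of the SQL Server helper flags
# documented above.  Note that DropColumnOp.reverse() raises unless the
# original Column object is attached, so a plain drop is not reversible
# under autogenerate.
from alembic import op

def upgrade():
    op.drop_column("account", "legacy_flag", mssql_drop_default=True)
# ---------------------------------------------------------------------------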
seealso:: + + :meth:`.Operations.drop_column` + + """ + op = cls( + operations.impl.table_name, + column_name, + schema=operations.impl.schema, + **kw + ) + return operations.invoke(op) + + +@Operations.register_operation("bulk_insert") +class BulkInsertOp(MigrateOperation): + """Represent a bulk insert operation.""" + + def __init__(self, table, rows, multiinsert=True): + self.table = table + self.rows = rows + self.multiinsert = multiinsert + + @classmethod + def bulk_insert(cls, operations, table, rows, multiinsert=True): + """Issue a "bulk insert" operation using the current + migration context. + + This provides a means of representing an INSERT of multiple rows + which works equally well in the context of executing on a live + connection as well as that of generating a SQL script. In the + case of a SQL script, the values are rendered inline into the + statement. + + e.g.:: + + from alembic import op + from datetime import date + from sqlalchemy.sql import table, column + from sqlalchemy import String, Integer, Date + + # Create an ad-hoc table to use for the insert statement. + accounts_table = table('account', + column('id', Integer), + column('name', String), + column('create_date', Date) + ) + + op.bulk_insert(accounts_table, + [ + {'id':1, 'name':'John Smith', + 'create_date':date(2010, 10, 5)}, + {'id':2, 'name':'Ed Williams', + 'create_date':date(2007, 5, 27)}, + {'id':3, 'name':'Wendy Jones', + 'create_date':date(2008, 8, 15)}, + ] + ) + + When using --sql mode, some datatypes may not render inline + automatically, such as dates and other special types. When this + issue is present, :meth:`.Operations.inline_literal` may be used:: + + op.bulk_insert(accounts_table, + [ + {'id':1, 'name':'John Smith', + 'create_date':op.inline_literal("2010-10-05")}, + {'id':2, 'name':'Ed Williams', + 'create_date':op.inline_literal("2007-05-27")}, + {'id':3, 'name':'Wendy Jones', + 'create_date':op.inline_literal("2008-08-15")}, + ], + multiinsert=False + ) + + When using :meth:`.Operations.inline_literal` in conjunction with + :meth:`.Operations.bulk_insert`, in order for the statement to work + in "online" (e.g. non --sql) mode, the + :paramref:`~.Operations.bulk_insert.multiinsert` + flag should be set to ``False``, which will have the effect of + individual INSERT statements being emitted to the database, each + with a distinct VALUES clause, so that the "inline" values can + still be rendered, rather than attempting to pass the values + as bound parameters. + + .. versionadded:: 0.6.4 :meth:`.Operations.inline_literal` can now + be used with :meth:`.Operations.bulk_insert`, and the + :paramref:`~.Operations.bulk_insert.multiinsert` flag has + been added to assist in this usage when running in "online" + mode. + + :param table: a table object which represents the target of the INSERT. + + :param rows: a list of dictionaries indicating rows. + + :param multiinsert: when at its default of True and --sql mode is not + enabled, the INSERT statement will be executed using + "executemany()" style, where all elements in the list of + dictionaries are passed as bound parameters in a single + list. Setting this to False results in individual INSERT + statements being emitted per parameter set, and is needed + in those cases where non-literal values are present in the + parameter sets. + + .. 
versionadded:: 0.6.4 + + """ + + op = cls(table, rows, multiinsert=multiinsert) + operations.invoke(op) + + +@Operations.register_operation("execute") +class ExecuteSQLOp(MigrateOperation): + """Represent an execute SQL operation.""" + + def __init__(self, sqltext, execution_options=None): + self.sqltext = sqltext + self.execution_options = execution_options + + @classmethod + def execute(cls, operations, sqltext, execution_options=None): + r"""Execute the given SQL using the current migration context. + + The given SQL can be a plain string, e.g.:: + + op.execute("INSERT INTO table (foo) VALUES ('some value')") + + Or it can be any kind of Core SQL Expression construct, such as + below where we use an update construct:: + + from sqlalchemy.sql import table, column + from sqlalchemy import String + from alembic import op + + account = table('account', + column('name', String) + ) + op.execute( + account.update().\\ + where(account.c.name==op.inline_literal('account 1')).\\ + values({'name':op.inline_literal('account 2')}) + ) + + Above, we made use of the SQLAlchemy + :func:`sqlalchemy.sql.expression.table` and + :func:`sqlalchemy.sql.expression.column` constructs to make a brief, + ad-hoc table construct just for our UPDATE statement. A full + :class:`~sqlalchemy.schema.Table` construct of course works perfectly + fine as well, though note it's a recommended practice to at least + ensure the definition of a table is self-contained within the migration + script, rather than imported from a module that may break compatibility + with older migrations. + + In a SQL script context, the statement is emitted directly to the + output stream. There is *no* return result, however, as this + function is oriented towards generating a change script + that can run in "offline" mode. Additionally, parameterized + statements are discouraged here, as they *will not work* in offline + mode. Above, we use :meth:`.inline_literal` where parameters are + to be used. + + For full interaction with a connected database where parameters can + also be used normally, use the "bind" available from the context:: + + from alembic import op + connection = op.get_bind() + + connection.execute( + account.update().where(account.c.name=='account 1'). + values({"name": "account 2"}) + ) + + Additionally, when passing the statement as a plain string, it is first + coerceed into a :func:`sqlalchemy.sql.expression.text` construct + before being passed along. In the less likely case that the + literal SQL string contains a colon, it must be escaped with a + backslash, as:: + + op.execute("INSERT INTO table (foo) VALUES ('\:colon_value')") + + + :param sql: Any legal SQLAlchemy expression, including: + + * a string + * a :func:`sqlalchemy.sql.expression.text` construct. + * a :func:`sqlalchemy.sql.expression.insert` construct. + * a :func:`sqlalchemy.sql.expression.update`, + :func:`sqlalchemy.sql.expression.insert`, + or :func:`sqlalchemy.sql.expression.delete` construct. + * Pretty much anything that's "executable" as described + in :ref:`sqlexpression_toplevel`. + + .. note:: when passing a plain string, the statement is coerced into + a :func:`sqlalchemy.sql.expression.text` construct. This construct + considers symbols with colons, e.g. ``:foo`` to be bound parameters. + To avoid this, ensure that colon symbols are escaped, e.g. + ``\:foo``. + + :param execution_options: Optional dictionary of + execution options, will be passed to + :meth:`sqlalchemy.engine.Connection.execution_options`. 
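# --- editorial sketch (not part of the vendored diff) ----------------------
# A hypothetical contrast of the two execution styles described above:
# op.execute() renders values inline and also works in --sql (offline) mode,
# while op.get_bind() returns a live connection where bound parameters are
# fine.  The "account" table and values are illustrative only.
from alembic import op
from sqlalchemy.sql import table, column
from sqlalchemy import String

account = table("account", column("name", String))

def upgrade():
    # offline-safe: literal values via inline_literal, no result returned
    op.execute(
        account.update()
        .where(account.c.name == op.inline_literal("account 1"))
        .values({"name": op.inline_literal("account 2")})
    )
    # online only: ordinary Core execution with bound parameters
    bind = op.get_bind()
    bind.execute(
        account.update()
        .where(account.c.name == "account 1")
        .values({"name": "account 2"})
    )
# ---------------------------------------------------------------------------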
+ """ + op = cls(sqltext, execution_options=execution_options) + return operations.invoke(op) + + +class OpContainer(MigrateOperation): + """Represent a sequence of operations operation.""" + + def __init__(self, ops=()): + self.ops = ops + + def is_empty(self): + return not self.ops + + def as_diffs(self): + return list(OpContainer._ops_as_diffs(self)) + + @classmethod + def _ops_as_diffs(cls, migrations): + for op in migrations.ops: + if hasattr(op, "ops"): + for sub_op in cls._ops_as_diffs(op): + yield sub_op + else: + yield op.to_diff_tuple() + + +class ModifyTableOps(OpContainer): + """Contains a sequence of operations that all apply to a single Table.""" + + def __init__(self, table_name, ops, schema=None): + super(ModifyTableOps, self).__init__(ops) + self.table_name = table_name + self.schema = schema + + def reverse(self): + return ModifyTableOps( + self.table_name, + ops=list(reversed([op.reverse() for op in self.ops])), + schema=self.schema, + ) + + +class UpgradeOps(OpContainer): + """contains a sequence of operations that would apply to the + 'upgrade' stream of a script. + + .. seealso:: + + :ref:`customizing_revision` + + """ + + def __init__(self, ops=(), upgrade_token="upgrades"): + super(UpgradeOps, self).__init__(ops=ops) + self.upgrade_token = upgrade_token + + def reverse_into(self, downgrade_ops): + downgrade_ops.ops[:] = list( + reversed([op.reverse() for op in self.ops]) + ) + return downgrade_ops + + def reverse(self): + return self.reverse_into(DowngradeOps(ops=[])) + + +class DowngradeOps(OpContainer): + """contains a sequence of operations that would apply to the + 'downgrade' stream of a script. + + .. seealso:: + + :ref:`customizing_revision` + + """ + + def __init__(self, ops=(), downgrade_token="downgrades"): + super(DowngradeOps, self).__init__(ops=ops) + self.downgrade_token = downgrade_token + + def reverse(self): + return UpgradeOps( + ops=list(reversed([op.reverse() for op in self.ops])) + ) + + +class MigrationScript(MigrateOperation): + """represents a migration script. + + E.g. when autogenerate encounters this object, this corresponds to the + production of an actual script file. + + A normal :class:`.MigrationScript` object would contain a single + :class:`.UpgradeOps` and a single :class:`.DowngradeOps` directive. + These are accessible via the ``.upgrade_ops`` and ``.downgrade_ops`` + attributes. + + In the case of an autogenerate operation that runs multiple times, + such as the multiple database example in the "multidb" template, + the ``.upgrade_ops`` and ``.downgrade_ops`` attributes are disabled, + and instead these objects should be accessed via the ``.upgrade_ops_list`` + and ``.downgrade_ops_list`` list-based attributes. These latter + attributes are always available at the very least as single-element lists. + + .. versionchanged:: 0.8.1 the ``.upgrade_ops`` and ``.downgrade_ops`` + attributes should be accessed via the ``.upgrade_ops_list`` + and ``.downgrade_ops_list`` attributes if multiple autogenerate + passes proceed on the same :class:`.MigrationScript` object. + + .. 
seealso:: + + :ref:`customizing_revision` + + """ + + def __init__( + self, + rev_id, + upgrade_ops, + downgrade_ops, + message=None, + imports=set(), + head=None, + splice=None, + branch_label=None, + version_path=None, + depends_on=None, + ): + self.rev_id = rev_id + self.message = message + self.imports = imports + self.head = head + self.splice = splice + self.branch_label = branch_label + self.version_path = version_path + self.depends_on = depends_on + self.upgrade_ops = upgrade_ops + self.downgrade_ops = downgrade_ops + + @property + def upgrade_ops(self): + """An instance of :class:`.UpgradeOps`. + + .. seealso:: + + :attr:`.MigrationScript.upgrade_ops_list` + """ + if len(self._upgrade_ops) > 1: + raise ValueError( + "This MigrationScript instance has a multiple-entry " + "list for UpgradeOps; please use the " + "upgrade_ops_list attribute." + ) + elif not self._upgrade_ops: + return None + else: + return self._upgrade_ops[0] + + @upgrade_ops.setter + def upgrade_ops(self, upgrade_ops): + self._upgrade_ops = util.to_list(upgrade_ops) + for elem in self._upgrade_ops: + assert isinstance(elem, UpgradeOps) + + @property + def downgrade_ops(self): + """An instance of :class:`.DowngradeOps`. + + .. seealso:: + + :attr:`.MigrationScript.downgrade_ops_list` + """ + if len(self._downgrade_ops) > 1: + raise ValueError( + "This MigrationScript instance has a multiple-entry " + "list for DowngradeOps; please use the " + "downgrade_ops_list attribute." + ) + elif not self._downgrade_ops: + return None + else: + return self._downgrade_ops[0] + + @downgrade_ops.setter + def downgrade_ops(self, downgrade_ops): + self._downgrade_ops = util.to_list(downgrade_ops) + for elem in self._downgrade_ops: + assert isinstance(elem, DowngradeOps) + + @property + def upgrade_ops_list(self): + """A list of :class:`.UpgradeOps` instances. + + This is used in place of the :attr:`.MigrationScript.upgrade_ops` + attribute when dealing with a revision operation that does + multiple autogenerate passes. + + .. versionadded:: 0.8.1 + + """ + return self._upgrade_ops + + @property + def downgrade_ops_list(self): + """A list of :class:`.DowngradeOps` instances. + + This is used in place of the :attr:`.MigrationScript.downgrade_ops` + attribute when dealing with a revision operation that does + multiple autogenerate passes. + + .. versionadded:: 0.8.1 + + """ + return self._downgrade_ops diff --git a/py3/lib/python3.6/site-packages/alembic/operations/schemaobj.py b/py3/lib/python3.6/site-packages/alembic/operations/schemaobj.py new file mode 100644 index 0000000..d90b5e6 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/operations/schemaobj.py @@ -0,0 +1,181 @@ +from sqlalchemy import schema as sa_schema +from sqlalchemy.types import Integer +from sqlalchemy.types import NULLTYPE + +from .. 
import util +from ..util.compat import string_types + + +class SchemaObjects(object): + def __init__(self, migration_context=None): + self.migration_context = migration_context + + def primary_key_constraint(self, name, table_name, cols, schema=None): + m = self.metadata() + columns = [sa_schema.Column(n, NULLTYPE) for n in cols] + t = sa_schema.Table(table_name, m, *columns, schema=schema) + p = sa_schema.PrimaryKeyConstraint(*[t.c[n] for n in cols], name=name) + t.append_constraint(p) + return p + + def foreign_key_constraint( + self, + name, + source, + referent, + local_cols, + remote_cols, + onupdate=None, + ondelete=None, + deferrable=None, + source_schema=None, + referent_schema=None, + initially=None, + match=None, + **dialect_kw + ): + m = self.metadata() + if source == referent and source_schema == referent_schema: + t1_cols = local_cols + remote_cols + else: + t1_cols = local_cols + sa_schema.Table( + referent, + m, + *[sa_schema.Column(n, NULLTYPE) for n in remote_cols], + schema=referent_schema + ) + + t1 = sa_schema.Table( + source, + m, + *[sa_schema.Column(n, NULLTYPE) for n in t1_cols], + schema=source_schema + ) + + tname = ( + "%s.%s" % (referent_schema, referent) + if referent_schema + else referent + ) + + dialect_kw["match"] = match + + f = sa_schema.ForeignKeyConstraint( + local_cols, + ["%s.%s" % (tname, n) for n in remote_cols], + name=name, + onupdate=onupdate, + ondelete=ondelete, + deferrable=deferrable, + initially=initially, + **dialect_kw + ) + t1.append_constraint(f) + + return f + + def unique_constraint(self, name, source, local_cols, schema=None, **kw): + t = sa_schema.Table( + source, + self.metadata(), + *[sa_schema.Column(n, NULLTYPE) for n in local_cols], + schema=schema + ) + kw["name"] = name + uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw) + # TODO: need event tests to ensure the event + # is fired off here + t.append_constraint(uq) + return uq + + def check_constraint(self, name, source, condition, schema=None, **kw): + t = sa_schema.Table( + source, + self.metadata(), + sa_schema.Column("x", Integer), + schema=schema, + ) + ck = sa_schema.CheckConstraint(condition, name=name, **kw) + t.append_constraint(ck) + return ck + + def generic_constraint(self, name, table_name, type_, schema=None, **kw): + t = self.table(table_name, schema=schema) + types = { + "foreignkey": lambda name: sa_schema.ForeignKeyConstraint( + [], [], name=name + ), + "primary": sa_schema.PrimaryKeyConstraint, + "unique": sa_schema.UniqueConstraint, + "check": lambda name: sa_schema.CheckConstraint("", name=name), + None: sa_schema.Constraint, + } + try: + const = types[type_] + except KeyError: + raise TypeError( + "'type' can be one of %s" + % ", ".join(sorted(repr(x) for x in types)) + ) + else: + const = const(name=name) + t.append_constraint(const) + return const + + def metadata(self): + kw = {} + if ( + self.migration_context is not None + and "target_metadata" in self.migration_context.opts + ): + mt = self.migration_context.opts["target_metadata"] + if hasattr(mt, "naming_convention"): + kw["naming_convention"] = mt.naming_convention + return sa_schema.MetaData(**kw) + + def table(self, name, *columns, **kw): + m = self.metadata() + t = sa_schema.Table(name, m, *columns, **kw) + for f in t.foreign_keys: + self._ensure_table_for_fk(m, f) + return t + + def column(self, name, type_, **kw): + return sa_schema.Column(name, type_, **kw) + + def index(self, name, tablename, columns, schema=None, **kw): + t = sa_schema.Table( + tablename or "no_table", 
self.metadata(), schema=schema + ) + idx = sa_schema.Index( + name, + *[util.sqla_compat._textual_index_column(t, n) for n in columns], + **kw + ) + return idx + + def _parse_table_key(self, table_key): + if "." in table_key: + tokens = table_key.split(".") + sname = ".".join(tokens[0:-1]) + tname = tokens[-1] + else: + tname = table_key + sname = None + return (sname, tname) + + def _ensure_table_for_fk(self, metadata, fk): + """create a placeholder Table object for the referent of a + ForeignKey. + + """ + if isinstance(fk._colspec, string_types): + table_key, cname = fk._colspec.rsplit(".", 1) + sname, tname = self._parse_table_key(table_key) + if table_key not in metadata.tables: + rel_t = sa_schema.Table(tname, metadata, schema=sname) + else: + rel_t = metadata.tables[table_key] + if cname not in rel_t.c: + rel_t.append_column(sa_schema.Column(cname, NULLTYPE)) diff --git a/py3/lib/python3.6/site-packages/alembic/operations/toimpl.py b/py3/lib/python3.6/site-packages/alembic/operations/toimpl.py new file mode 100644 index 0000000..5699423 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/operations/toimpl.py @@ -0,0 +1,177 @@ +from sqlalchemy import schema as sa_schema + +from . import ops +from .base import Operations +from ..util import sqla_compat + + +@Operations.implementation_for(ops.AlterColumnOp) +def alter_column(operations, operation): + + compiler = operations.impl.dialect.statement_compiler( + operations.impl.dialect, None + ) + + existing_type = operation.existing_type + existing_nullable = operation.existing_nullable + existing_server_default = operation.existing_server_default + type_ = operation.modify_type + column_name = operation.column_name + table_name = operation.table_name + schema = operation.schema + server_default = operation.modify_server_default + new_column_name = operation.modify_name + nullable = operation.modify_nullable + comment = operation.modify_comment + existing_comment = operation.existing_comment + + def _count_constraint(constraint): + return not isinstance(constraint, sa_schema.PrimaryKeyConstraint) and ( + not constraint._create_rule or constraint._create_rule(compiler) + ) + + if existing_type and type_: + t = operations.schema_obj.table( + table_name, + sa_schema.Column(column_name, existing_type), + schema=schema, + ) + for constraint in t.constraints: + if _count_constraint(constraint): + operations.impl.drop_constraint(constraint) + + operations.impl.alter_column( + table_name, + column_name, + nullable=nullable, + server_default=server_default, + name=new_column_name, + type_=type_, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + comment=comment, + existing_comment=existing_comment, + **operation.kw + ) + + if type_: + t = operations.schema_obj.table( + table_name, + operations.schema_obj.column(column_name, type_), + schema=schema, + ) + for constraint in t.constraints: + if _count_constraint(constraint): + operations.impl.add_constraint(constraint) + + +@Operations.implementation_for(ops.DropTableOp) +def drop_table(operations, operation): + operations.impl.drop_table( + operation.to_table(operations.migration_context) + ) + + +@Operations.implementation_for(ops.DropColumnOp) +def drop_column(operations, operation): + column = operation.to_column(operations.migration_context) + operations.impl.drop_column( + operation.table_name, column, schema=operation.schema, **operation.kw + ) + + +@Operations.implementation_for(ops.CreateIndexOp) 
+def create_index(operations, operation): + idx = operation.to_index(operations.migration_context) + operations.impl.create_index(idx) + + +@Operations.implementation_for(ops.DropIndexOp) +def drop_index(operations, operation): + operations.impl.drop_index( + operation.to_index(operations.migration_context) + ) + + +@Operations.implementation_for(ops.CreateTableOp) +def create_table(operations, operation): + table = operation.to_table(operations.migration_context) + operations.impl.create_table(table) + return table + + +@Operations.implementation_for(ops.RenameTableOp) +def rename_table(operations, operation): + operations.impl.rename_table( + operation.table_name, operation.new_table_name, schema=operation.schema + ) + + +@Operations.implementation_for(ops.CreateTableCommentOp) +def create_table_comment(operations, operation): + table = operation.to_table(operations.migration_context) + operations.impl.create_table_comment(table) + + +@Operations.implementation_for(ops.DropTableCommentOp) +def drop_table_comment(operations, operation): + table = operation.to_table(operations.migration_context) + operations.impl.drop_table_comment(table) + + +@Operations.implementation_for(ops.AddColumnOp) +def add_column(operations, operation): + table_name = operation.table_name + column = operation.column + schema = operation.schema + + t = operations.schema_obj.table(table_name, column, schema=schema) + operations.impl.add_column(table_name, column, schema=schema) + for constraint in t.constraints: + if not isinstance(constraint, sa_schema.PrimaryKeyConstraint): + operations.impl.add_constraint(constraint) + for index in t.indexes: + operations.impl.create_index(index) + + with_comment = ( + sqla_compat._dialect_supports_comments(operations.impl.dialect) + and not operations.impl.dialect.inline_comments + ) + comment = sqla_compat._comment_attribute(column) + if comment and with_comment: + operations.impl.create_column_comment(column) + + +@Operations.implementation_for(ops.AddConstraintOp) +def create_constraint(operations, operation): + operations.impl.add_constraint( + operation.to_constraint(operations.migration_context) + ) + + +@Operations.implementation_for(ops.DropConstraintOp) +def drop_constraint(operations, operation): + operations.impl.drop_constraint( + operations.schema_obj.generic_constraint( + operation.constraint_name, + operation.table_name, + operation.constraint_type, + schema=operation.schema, + ) + ) + + +@Operations.implementation_for(ops.BulkInsertOp) +def bulk_insert(operations, operation): + operations.impl.bulk_insert( + operation.table, operation.rows, multiinsert=operation.multiinsert + ) + + +@Operations.implementation_for(ops.ExecuteSQLOp) +def execute_sql(operations, operation): + operations.migration_context.impl.execute( + operation.sqltext, execution_options=operation.execution_options + ) diff --git a/py3/lib/python3.6/site-packages/alembic/runtime/__init__.py b/py3/lib/python3.6/site-packages/alembic/runtime/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/py3/lib/python3.6/site-packages/alembic/runtime/environment.py b/py3/lib/python3.6/site-packages/alembic/runtime/environment.py new file mode 100644 index 0000000..c4d6586 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/runtime/environment.py @@ -0,0 +1,938 @@ +from .migration import MigrationContext +from .. import util +from ..operations import Operations + + +class EnvironmentContext(util.ModuleClsProxy): + + """A configurational facade made available in an ``env.py`` script. 
+ + The :class:`.EnvironmentContext` acts as a *facade* to the more + nuts-and-bolts objects of :class:`.MigrationContext` as well as certain + aspects of :class:`.Config`, + within the context of the ``env.py`` script that is invoked by + most Alembic commands. + + :class:`.EnvironmentContext` is normally instantiated + when a command in :mod:`alembic.command` is run. It then makes + itself available in the ``alembic.context`` module for the scope + of the command. From within an ``env.py`` script, the current + :class:`.EnvironmentContext` is available by importing this module. + + :class:`.EnvironmentContext` also supports programmatic usage. + At this level, it acts as a Python context manager, that is, is + intended to be used using the + ``with:`` statement. A typical use of :class:`.EnvironmentContext`:: + + from alembic.config import Config + from alembic.script import ScriptDirectory + + config = Config() + config.set_main_option("script_location", "myapp:migrations") + script = ScriptDirectory.from_config(config) + + def my_function(rev, context): + '''do something with revision "rev", which + will be the current database revision, + and "context", which is the MigrationContext + that the env.py will create''' + + with EnvironmentContext( + config, + script, + fn = my_function, + as_sql = False, + starting_rev = 'base', + destination_rev = 'head', + tag = "sometag" + ): + script.run_env() + + The above script will invoke the ``env.py`` script + within the migration environment. If and when ``env.py`` + calls :meth:`.MigrationContext.run_migrations`, the + ``my_function()`` function above will be called + by the :class:`.MigrationContext`, given the context + itself as well as the current revision in the database. + + .. note:: + + For most API usages other than full blown + invocation of migration scripts, the :class:`.MigrationContext` + and :class:`.ScriptDirectory` objects can be created and + used directly. The :class:`.EnvironmentContext` object + is *only* needed when you need to actually invoke the + ``env.py`` module present in the migration environment. + + """ + + _migration_context = None + + config = None + """An instance of :class:`.Config` representing the + configuration file contents as well as other variables + set programmatically within it.""" + + script = None + """An instance of :class:`.ScriptDirectory` which provides + programmatic access to version files within the ``versions/`` + directory. + + """ + + def __init__(self, config, script, **kw): + r"""Construct a new :class:`.EnvironmentContext`. + + :param config: a :class:`.Config` instance. + :param script: a :class:`.ScriptDirectory` instance. + :param \**kw: keyword options that will be ultimately + passed along to the :class:`.MigrationContext` when + :meth:`.EnvironmentContext.configure` is called. + + """ + self.config = config + self.script = script + self.context_opts = kw + + def __enter__(self): + """Establish a context which provides a + :class:`.EnvironmentContext` object to + env.py scripts. + + The :class:`.EnvironmentContext` will + be made available as ``from alembic import context``. + + """ + self._install_proxy() + return self + + def __exit__(self, *arg, **kw): + self._remove_proxy() + + def is_offline_mode(self): + """Return True if the current migrations environment + is running in "offline mode". + + This is ``True`` or ``False`` depending + on the the ``--sql`` flag passed. + + This function does not require that the :class:`.MigrationContext` + has been configured. 
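``is_offline_mode()`` is the switch the stock ``env.py`` template branches on, and most of the methods documented below assume that layout. A sketch of the usual dispatch, with ``target_metadata`` left as a placeholder rather than anything defined in this patch::

    from sqlalchemy import engine_from_config, pool
    from alembic import context

    config = context.config
    target_metadata = None  # placeholder; normally the application's MetaData

    def run_migrations_offline():
        # --sql mode: render statements into a script, no DB connection needed
        url = config.get_main_option("sqlalchemy.url")
        context.configure(
            url=url, target_metadata=target_metadata, literal_binds=True
        )
        with context.begin_transaction():
            context.run_migrations()

    def run_migrations_online():
        connectable = engine_from_config(
            config.get_section(config.config_ini_section),
            prefix="sqlalchemy.",
            poolclass=pool.NullPool,
        )
        with connectable.connect() as connection:
            context.configure(
                connection=connection, target_metadata=target_metadata
            )
            with context.begin_transaction():
                context.run_migrations()

    if context.is_offline_mode():
        run_migrations_offline()
    else:
        run_migrations_online()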
+ + """ + return self.context_opts.get("as_sql", False) + + def is_transactional_ddl(self): + """Return True if the context is configured to expect a + transactional DDL capable backend. + + This defaults to the type of database in use, and + can be overridden by the ``transactional_ddl`` argument + to :meth:`.configure` + + This function requires that a :class:`.MigrationContext` + has first been made available via :meth:`.configure`. + + """ + return self.get_context().impl.transactional_ddl + + def requires_connection(self): + return not self.is_offline_mode() + + def get_head_revision(self): + """Return the hex identifier of the 'head' script revision. + + If the script directory has multiple heads, this + method raises a :class:`.CommandError`; + :meth:`.EnvironmentContext.get_head_revisions` should be preferred. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + .. seealso:: :meth:`.EnvironmentContext.get_head_revisions` + + """ + return self.script.as_revision_number("head") + + def get_head_revisions(self): + """Return the hex identifier of the 'heads' script revision(s). + + This returns a tuple containing the version number of all + heads in the script directory. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + .. versionadded:: 0.7.0 + + """ + return self.script.as_revision_number("heads") + + def get_starting_revision_argument(self): + """Return the 'starting revision' argument, + if the revision was passed using ``start:end``. + + This is only meaningful in "offline" mode. + Returns ``None`` if no value is available + or was configured. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + if self._migration_context is not None: + return self.script.as_revision_number( + self.get_context()._start_from_rev + ) + elif "starting_rev" in self.context_opts: + return self.script.as_revision_number( + self.context_opts["starting_rev"] + ) + else: + # this should raise only in the case that a command + # is being run where the "starting rev" is never applicable; + # this is to catch scripts which rely upon this in + # non-sql mode or similar + raise util.CommandError( + "No starting revision argument is available." + ) + + def get_revision_argument(self): + """Get the 'destination' revision argument. + + This is typically the argument passed to the + ``upgrade`` or ``downgrade`` command. + + If it was specified as ``head``, the actual + version number is returned; if specified + as ``base``, ``None`` is returned. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + return self.script.as_revision_number( + self.context_opts["destination_rev"] + ) + + def get_tag_argument(self): + """Return the value passed for the ``--tag`` argument, if any. + + The ``--tag`` argument is not used directly by Alembic, + but is available for custom ``env.py`` configurations that + wish to use it; particularly for offline generation scripts + that wish to generate tagged filenames. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + .. seealso:: + + :meth:`.EnvironmentContext.get_x_argument` - a newer and more + open ended system of extending ``env.py`` scripts via the command + line. + + """ + return self.context_opts.get("tag", None) + + def get_x_argument(self, as_dictionary=False): + """Return the value(s) passed for the ``-x`` argument, if any. 
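Tying ``get_tag_argument()`` above to offline generation: a common use is to name the rendered SQL file after the tag. A sketch only; the file-naming scheme and the ``url`` variable are illustrative, not part of this patch::

    def run_migrations_offline():
        tag = context.get_tag_argument() or "untagged"
        with open("upgrade_%s.sql" % tag, "w") as output:
            context.configure(
                url=url,                # assumed to be defined earlier in env.py
                output_buffer=output,   # documented under configure() below
                literal_binds=True,
            )
            with context.begin_transaction():
                context.run_migrations()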
+ + The ``-x`` argument is an open ended flag that allows any user-defined + value or values to be passed on the command line, then available + here for consumption by a custom ``env.py`` script. + + The return value is a list, returned directly from the ``argparse`` + structure. If ``as_dictionary=True`` is passed, the ``x`` arguments + are parsed using ``key=value`` format into a dictionary that is + then returned. + + For example, to support passing a database URL on the command line, + the standard ``env.py`` script can be modified like this:: + + cmd_line_url = context.get_x_argument( + as_dictionary=True).get('dbname') + if cmd_line_url: + engine = create_engine(cmd_line_url) + else: + engine = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + This then takes effect by running the ``alembic`` script as:: + + alembic -x dbname=postgresql://user:pass@host/dbname upgrade head + + This function does not require that the :class:`.MigrationContext` + has been configured. + + .. versionadded:: 0.6.0 + + .. seealso:: + + :meth:`.EnvironmentContext.get_tag_argument` + + :attr:`.Config.cmd_opts` + + """ + if self.config.cmd_opts is not None: + value = self.config.cmd_opts.x or [] + else: + value = [] + if as_dictionary: + value = dict(arg.split("=", 1) for arg in value) + return value + + def configure( + self, + connection=None, + url=None, + dialect_name=None, + transactional_ddl=None, + transaction_per_migration=False, + output_buffer=None, + starting_rev=None, + tag=None, + template_args=None, + render_as_batch=False, + target_metadata=None, + include_symbol=None, + include_object=None, + include_schemas=False, + process_revision_directives=None, + compare_type=False, + compare_server_default=False, + render_item=None, + literal_binds=False, + upgrade_token="upgrades", + downgrade_token="downgrades", + alembic_module_prefix="op.", + sqlalchemy_module_prefix="sa.", + user_module_prefix=None, + on_version_apply=None, + **kw + ): + """Configure a :class:`.MigrationContext` within this + :class:`.EnvironmentContext` which will provide database + connectivity and other configuration to a series of + migration scripts. + + Many methods on :class:`.EnvironmentContext` require that + this method has been called in order to function, as they + ultimately need to have database access or at least access + to the dialect in use. Those which do are documented as such. + + The important thing needed by :meth:`.configure` is a + means to determine what kind of database dialect is in use. + An actual connection to that database is needed only if + the :class:`.MigrationContext` is to be used in + "online" mode. + + If the :meth:`.is_offline_mode` function returns ``True``, + then no connection is needed here. Otherwise, the + ``connection`` parameter should be present as an + instance of :class:`sqlalchemy.engine.Connection`. + + This function is typically called from the ``env.py`` + script within a migration environment. It can be called + multiple times for an invocation. The most recent + :class:`~sqlalchemy.engine.Connection` + for which it was called is the one that will be operated upon + by the next call to :meth:`.run_migrations`. + + General parameters: + + :param connection: a :class:`~sqlalchemy.engine.Connection` + to use + for SQL execution in "online" mode. When present, is also + used to determine the type of dialect in use. + :param url: a string database url, or a + :class:`sqlalchemy.engine.url.URL` object. 
+ The type of dialect to be used will be derived from this if + ``connection`` is not passed. + :param dialect_name: string name of a dialect, such as + "postgresql", "mssql", etc. + The type of dialect to be used will be derived from this if + ``connection`` and ``url`` are not passed. + :param transactional_ddl: Force the usage of "transactional" + DDL on or off; + this otherwise defaults to whether or not the dialect in + use supports it. + :param transaction_per_migration: if True, nest each migration script + in a transaction rather than the full series of migrations to + run. + + .. versionadded:: 0.6.5 + + :param output_buffer: a file-like object that will be used + for textual output + when the ``--sql`` option is used to generate SQL scripts. + Defaults to + ``sys.stdout`` if not passed here and also not present on + the :class:`.Config` + object. The value here overrides that of the :class:`.Config` + object. + :param output_encoding: when using ``--sql`` to generate SQL + scripts, apply this encoding to the string output. + :param literal_binds: when using ``--sql`` to generate SQL + scripts, pass through the ``literal_binds`` flag to the compiler + so that any literal values that would ordinarily be bound + parameters are converted to plain strings. + + .. warning:: Dialects can typically only handle simple datatypes + like strings and numbers for auto-literal generation. Datatypes + like dates, intervals, and others may still require manual + formatting, typically using :meth:`.Operations.inline_literal`. + + .. note:: the ``literal_binds`` flag is ignored on SQLAlchemy + versions prior to 0.8 where this feature is not supported. + + .. versionadded:: 0.7.6 + + .. seealso:: + + :meth:`.Operations.inline_literal` + + :param starting_rev: Override the "starting revision" argument + when using ``--sql`` mode. + :param tag: a string tag for usage by custom ``env.py`` scripts. + Set via the ``--tag`` option, can be overridden here. + :param template_args: dictionary of template arguments which + will be added to the template argument environment when + running the "revision" command. Note that the script environment + is only run within the "revision" command if the --autogenerate + option is used, or if the option "revision_environment=true" + is present in the alembic.ini file. + + :param version_table: The name of the Alembic version table. + The default is ``'alembic_version'``. + :param version_table_schema: Optional schema to place version + table within. + :param version_table_pk: boolean, whether the Alembic version table + should use a primary key constraint for the "value" column; this + only takes effect when the table is first created. + Defaults to True; setting to False should not be necessary and is + here for backwards compatibility reasons. + + .. versionadded:: 0.8.10 Added the + :paramref:`.EnvironmentContext.configure.version_table_pk` + flag and additionally established that the Alembic version table + has a primary key constraint by default. + + :param on_version_apply: a callable or collection of callables to be + run for each migration step. + The callables will be run in the order they are given, once for + each migration step, after the respective operation has been + applied but before its transaction is finalized. 
+ Each callable accepts no positional arguments and the following + keyword arguments: + + * ``ctx``: the :class:`.MigrationContext` running the migration, + * ``step``: a :class:`.MigrationInfo` representing the + step currently being applied, + * ``heads``: a collection of version strings representing the + current heads, + * ``run_args``: the ``**kwargs`` passed to :meth:`.run_migrations`. + + .. versionadded:: 0.9.3 + + + Parameters specific to the autogenerate feature, when + ``alembic revision`` is run with the ``--autogenerate`` feature: + + :param target_metadata: a :class:`sqlalchemy.schema.MetaData` + object, or a sequence of :class:`~sqlalchemy.schema.MetaData` + objects, that will be consulted during autogeneration. + The tables present in each :class:`~sqlalchemy.schema.MetaData` + will be compared against + what is locally available on the target + :class:`~sqlalchemy.engine.Connection` + to produce candidate upgrade/downgrade operations. + + .. versionchanged:: 0.9.0 the + :paramref:`.EnvironmentContext.configure.target_metadata` + parameter may now be passed a sequence of + :class:`~sqlalchemy.schema.MetaData` objects to support + autogeneration of multiple :class:`~sqlalchemy.schema.MetaData` + collections. + + :param compare_type: Indicates type comparison behavior during + an autogenerate + operation. Defaults to ``False`` which disables type + comparison. Set to + ``True`` to turn on default type comparison, which has varied + accuracy depending on backend. See :ref:`compare_types` + for an example as well as information on other type + comparison options. + + .. seealso:: + + :ref:`compare_types` + + :paramref:`.EnvironmentContext.configure.compare_server_default` + + :param compare_server_default: Indicates server default comparison + behavior during + an autogenerate operation. Defaults to ``False`` which disables + server default + comparison. Set to ``True`` to turn on server default comparison, + which has + varied accuracy depending on backend. + + To customize server default comparison behavior, a callable may + be specified + which can filter server default comparisons during an + autogenerate operation. + defaults during an autogenerate operation. The format of this + callable is:: + + def my_compare_server_default(context, inspected_column, + metadata_column, inspected_default, metadata_default, + rendered_metadata_default): + # return True if the defaults are different, + # False if not, or None to allow the default implementation + # to compare these defaults + return None + + context.configure( + # ... + compare_server_default = my_compare_server_default + ) + + ``inspected_column`` is a dictionary structure as returned by + :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas + ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from + the local model environment. + + A return value of ``None`` indicates to allow default server default + comparison + to proceed. Note that some backends such as Postgresql actually + execute + the two defaults on the database side to compare for equivalence. + + .. seealso:: + + :paramref:`.EnvironmentContext.configure.compare_type` + + :param include_object: A callable function which is given + the chance to return ``True`` or ``False`` for any object, + indicating if the given object should be considered in the + autogenerate sweep. 
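One aside before the ``include_object`` arguments are listed below: ``compare_type`` also accepts a callable, with the same calling convention that ``MigrationContext._compare_type`` uses further down in ``migration.py``. A sketch::

    def my_compare_type(context, inspected_column, metadata_column,
                        inspected_type, metadata_type):
        # return False if the types are equivalent, True if they differ,
        # or None to fall back to the default comparison
        return None

    context.configure(
        # ...
        compare_type=my_compare_type
    )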
+ + The function accepts the following positional arguments: + + * ``object``: a :class:`~sqlalchemy.schema.SchemaItem` object such + as a :class:`~sqlalchemy.schema.Table`, + :class:`~sqlalchemy.schema.Column`, + :class:`~sqlalchemy.schema.Index` + :class:`~sqlalchemy.schema.UniqueConstraint`, + or :class:`~sqlalchemy.schema.ForeignKeyConstraint` object + * ``name``: the name of the object. This is typically available + via ``object.name``. + * ``type``: a string describing the type of object; currently + ``"table"``, ``"column"``, ``"index"``, ``"unique_constraint"``, + or ``"foreign_key_constraint"`` + + .. versionadded:: 0.7.0 Support for indexes and unique constraints + within the + :paramref:`~.EnvironmentContext.configure.include_object` hook. + + .. versionadded:: 0.7.1 Support for foreign keys within the + :paramref:`~.EnvironmentContext.configure.include_object` hook. + + * ``reflected``: ``True`` if the given object was produced based on + table reflection, ``False`` if it's from a local :class:`.MetaData` + object. + * ``compare_to``: the object being compared against, if available, + else ``None``. + + E.g.:: + + def include_object(object, name, type_, reflected, compare_to): + if (type_ == "column" and + not reflected and + object.info.get("skip_autogenerate", False)): + return False + else: + return True + + context.configure( + # ... + include_object = include_object + ) + + :paramref:`.EnvironmentContext.configure.include_object` can also + be used to filter on specific schemas to include or omit, when + the :paramref:`.EnvironmentContext.configure.include_schemas` + flag is set to ``True``. The :attr:`.Table.schema` attribute + on each :class:`.Table` object reflected will indicate the name of the + schema from which the :class:`.Table` originates. + + .. versionadded:: 0.6.0 + + .. seealso:: + + :paramref:`.EnvironmentContext.configure.include_schemas` + + :param include_symbol: A callable function which, given a table name + and schema name (may be ``None``), returns ``True`` or ``False``, + indicating if the given table should be considered in the + autogenerate sweep. + + .. deprecated:: 0.6.0 + :paramref:`.EnvironmentContext.configure.include_symbol` + is superceded by the more generic + :paramref:`.EnvironmentContext.configure.include_object` + parameter. + + E.g.:: + + def include_symbol(tablename, schema): + return tablename not in ("skip_table_one", "skip_table_two") + + context.configure( + # ... + include_symbol = include_symbol + ) + + .. seealso:: + + :paramref:`.EnvironmentContext.configure.include_schemas` + + :paramref:`.EnvironmentContext.configure.include_object` + + :param render_as_batch: if True, commands which alter elements + within a table will be placed under a ``with batch_alter_table():`` + directive, so that batch migrations will take place. + + .. versionadded:: 0.7.0 + + .. seealso:: + + :ref:`batch_migrations` + + :param include_schemas: If True, autogenerate will scan across + all schemas located by the SQLAlchemy + :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names` + method, and include all differences in tables found across all + those schemas. When using this option, you may want to also + use the :paramref:`.EnvironmentContext.configure.include_object` + option to specify a callable which + can filter the tables/schemas that get included. + + .. seealso:: + + :paramref:`.EnvironmentContext.configure.include_object` + + :param render_item: Callable that can be used to override how + any schema item, i.e. 
column, constraint, type, + etc., is rendered for autogenerate. The callable receives a + string describing the type of object, the object, and + the autogen context. If it returns False, the + default rendering method will be used. If it returns None, + the item will not be rendered in the context of a Table + construct, that is, can be used to skip columns or constraints + within op.create_table():: + + def my_render_column(type_, col, autogen_context): + if type_ == "column" and isinstance(col, MySpecialCol): + return repr(col) + else: + return False + + context.configure( + # ... + render_item = my_render_column + ) + + Available values for the type string include: ``"column"``, + ``"primary_key"``, ``"foreign_key"``, ``"unique"``, ``"check"``, + ``"type"``, ``"server_default"``. + + .. seealso:: + + :ref:`autogen_render_types` + + :param upgrade_token: When autogenerate completes, the text of the + candidate upgrade operations will be present in this template + variable when ``script.py.mako`` is rendered. Defaults to + ``upgrades``. + :param downgrade_token: When autogenerate completes, the text of the + candidate downgrade operations will be present in this + template variable when ``script.py.mako`` is rendered. Defaults to + ``downgrades``. + + :param alembic_module_prefix: When autogenerate refers to Alembic + :mod:`alembic.operations` constructs, this prefix will be used + (i.e. ``op.create_table``) Defaults to "``op.``". + Can be ``None`` to indicate no prefix. + + :param sqlalchemy_module_prefix: When autogenerate refers to + SQLAlchemy + :class:`~sqlalchemy.schema.Column` or type classes, this prefix + will be used + (i.e. ``sa.Column("somename", sa.Integer)``) Defaults to "``sa.``". + Can be ``None`` to indicate no prefix. + Note that when dialect-specific types are rendered, autogenerate + will render them using the dialect module name, i.e. ``mssql.BIT()``, + ``postgresql.UUID()``. + + :param user_module_prefix: When autogenerate refers to a SQLAlchemy + type (e.g. :class:`.TypeEngine`) where the module name is not + under the ``sqlalchemy`` namespace, this prefix will be used + within autogenerate. If left at its default of + ``None``, the ``__module__`` attribute of the type is used to + render the import module. It's a good practice to set this + and to have all custom types be available from a fixed module space, + in order to future-proof migration files against reorganizations + in modules. + + .. versionchanged:: 0.7.0 + :paramref:`.EnvironmentContext.configure.user_module_prefix` + no longer defaults to the value of + :paramref:`.EnvironmentContext.configure.sqlalchemy_module_prefix` + when left at ``None``; the ``__module__`` attribute is now used. + + .. versionadded:: 0.6.3 added + :paramref:`.EnvironmentContext.configure.user_module_prefix` + + .. seealso:: + + :ref:`autogen_module_prefix` + + :param process_revision_directives: a callable function that will + be passed a structure representing the end result of an autogenerate + or plain "revision" operation, which can be manipulated to affect + how the ``alembic revision`` command ultimately outputs new + revision scripts. The structure of the callable is:: + + def process_revision_directives(context, revision, directives): + pass + + The ``directives`` parameter is a Python list containing + a single :class:`.MigrationScript` directive, which represents + the revision file to be generated. This list as well as its + contents may be freely modified to produce any set of commands. 
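One concrete use, in the spirit of the documented "skip empty autogenerate revisions" recipe (a sketch; the ``cmd_opts`` check mirrors the note that follows)::

    def process_revision_directives(context, revision, directives):
        if getattr(context.config.cmd_opts, "autogenerate", False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                # nothing was detected; suppress the revision file entirely
                directives[:] = []

    context.configure(
        # ...
        process_revision_directives=process_revision_directives
    )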
+ The section :ref:`customizing_revision` shows an example of + doing this. The ``context`` parameter is the + :class:`.MigrationContext` in use, + and ``revision`` is a tuple of revision identifiers representing the + current revision of the database. + + The callable is invoked at all times when the ``--autogenerate`` + option is passed to ``alembic revision``. If ``--autogenerate`` + is not passed, the callable is invoked only if the + ``revision_environment`` variable is set to True in the Alembic + configuration, in which case the given ``directives`` collection + will contain empty :class:`.UpgradeOps` and :class:`.DowngradeOps` + collections for ``.upgrade_ops`` and ``.downgrade_ops``. The + ``--autogenerate`` option itself can be inferred by inspecting + ``context.config.cmd_opts.autogenerate``. + + The callable function may optionally be an instance of + a :class:`.Rewriter` object. This is a helper object that + assists in the production of autogenerate-stream rewriter functions. + + + .. versionadded:: 0.8.0 + + .. versionchanged:: 0.8.1 - The + :paramref:`.EnvironmentContext.configure.process_revision_directives` + hook can append op directives into :class:`.UpgradeOps` and + :class:`.DowngradeOps` which will be rendered in Python regardless + of whether the ``--autogenerate`` option is in use or not; + the ``revision_environment`` configuration variable should be + set to "true" in the config to enable this. + + + .. seealso:: + + :ref:`customizing_revision` + + :ref:`autogen_rewriter` + + :paramref:`.command.revision.process_revision_directives` + + Parameters specific to individual backends: + + :param mssql_batch_separator: The "batch separator" which will + be placed between each statement when generating offline SQL Server + migrations. Defaults to ``GO``. Note this is in addition to the + customary semicolon ``;`` at the end of each statement; SQL Server + considers the "batch separator" to denote the end of an + individual statement execution, and cannot group certain + dependent operations in one step. + :param oracle_batch_separator: The "batch separator" which will + be placed between each statement when generating offline + Oracle migrations. Defaults to ``/``. Oracle doesn't add a + semicolon between statements like most other backends. 
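Pulling several of these parameters together, a representative online-mode call might look like the following. This is a sketch only; ``connection``, ``target_metadata`` and ``include_object`` are assumed to be defined as in the earlier examples::

    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
        include_object=include_object,
        render_as_batch=connection.dialect.name == "sqlite",
        transaction_per_migration=True,
    )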
+ + """ + opts = self.context_opts + if transactional_ddl is not None: + opts["transactional_ddl"] = transactional_ddl + if output_buffer is not None: + opts["output_buffer"] = output_buffer + elif self.config.output_buffer is not None: + opts["output_buffer"] = self.config.output_buffer + if starting_rev: + opts["starting_rev"] = starting_rev + if tag: + opts["tag"] = tag + if template_args and "template_args" in opts: + opts["template_args"].update(template_args) + opts["transaction_per_migration"] = transaction_per_migration + opts["target_metadata"] = target_metadata + opts["include_symbol"] = include_symbol + opts["include_object"] = include_object + opts["include_schemas"] = include_schemas + opts["render_as_batch"] = render_as_batch + opts["upgrade_token"] = upgrade_token + opts["downgrade_token"] = downgrade_token + opts["sqlalchemy_module_prefix"] = sqlalchemy_module_prefix + opts["alembic_module_prefix"] = alembic_module_prefix + opts["user_module_prefix"] = user_module_prefix + opts["literal_binds"] = literal_binds + opts["process_revision_directives"] = process_revision_directives + opts["on_version_apply"] = util.to_tuple(on_version_apply, default=()) + + if render_item is not None: + opts["render_item"] = render_item + if compare_type is not None: + opts["compare_type"] = compare_type + if compare_server_default is not None: + opts["compare_server_default"] = compare_server_default + opts["script"] = self.script + + opts.update(kw) + + self._migration_context = MigrationContext.configure( + connection=connection, + url=url, + dialect_name=dialect_name, + environment_context=self, + opts=opts, + ) + + def run_migrations(self, **kw): + """Run migrations as determined by the current command line + configuration + as well as versioning information present (or not) in the current + database connection (if one is present). + + The function accepts optional ``**kw`` arguments. If these are + passed, they are sent directly to the ``upgrade()`` and + ``downgrade()`` + functions within each target revision file. By modifying the + ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()`` + functions accept arguments, parameters can be passed here so that + contextual information, usually information to identify a particular + database in use, can be passed from a custom ``env.py`` script + to the migration functions. + + This function requires that a :class:`.MigrationContext` has + first been made available via :meth:`.configure`. + + """ + with Operations.context(self._migration_context): + self.get_context().run_migrations(**kw) + + def execute(self, sql, execution_options=None): + """Execute the given SQL using the current change context. + + The behavior of :meth:`.execute` is the same + as that of :meth:`.Operations.execute`. Please see that + function's documentation for full detail including + caveats and limitations. + + This function requires that a :class:`.MigrationContext` has + first been made available via :meth:`.configure`. + + """ + self.get_context().execute(sql, execution_options=execution_options) + + def static_output(self, text): + """Emit text directly to the "offline" SQL stream. + + Typically this is for emitting comments that + start with --. The statement is not treated + as a SQL execution, no ; or batch separator + is added, etc. 
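Both ``execute()`` and ``static_output()`` are mainly interesting in ``--sql`` mode, where they write into the generated script instead of touching a database. A small sketch; the ``deploy_audit`` table is purely illustrative::

    if context.is_offline_mode():
        # plain comment line in the rendered SQL, not a statement
        context.static_output("-- generated by env.py in offline mode")

    # routed through the migration context: written to the script in --sql
    # mode, executed on the live connection otherwise
    context.execute("UPDATE deploy_audit SET last_migration = CURRENT_TIMESTAMP")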
+ + """ + self.get_context().impl.static_output(text) + + def begin_transaction(self): + """Return a context manager that will + enclose an operation within a "transaction", + as defined by the environment's offline + and transactional DDL settings. + + e.g.:: + + with context.begin_transaction(): + context.run_migrations() + + :meth:`.begin_transaction` is intended to + "do the right thing" regardless of + calling context: + + * If :meth:`.is_transactional_ddl` is ``False``, + returns a "do nothing" context manager + which otherwise produces no transactional + state or directives. + * If :meth:`.is_offline_mode` is ``True``, + returns a context manager that will + invoke the :meth:`.DefaultImpl.emit_begin` + and :meth:`.DefaultImpl.emit_commit` + methods, which will produce the string + directives ``BEGIN`` and ``COMMIT`` on + the output stream, as rendered by the + target backend (e.g. SQL Server would + emit ``BEGIN TRANSACTION``). + * Otherwise, calls :meth:`sqlalchemy.engine.Connection.begin` + on the current online connection, which + returns a :class:`sqlalchemy.engine.Transaction` + object. This object demarcates a real + transaction and is itself a context manager, + which will roll back if an exception + is raised. + + Note that a custom ``env.py`` script which + has more specific transactional needs can of course + manipulate the :class:`~sqlalchemy.engine.Connection` + directly to produce transactional state in "online" + mode. + + """ + + return self.get_context().begin_transaction() + + def get_context(self): + """Return the current :class:`.MigrationContext` object. + + If :meth:`.EnvironmentContext.configure` has not been + called yet, raises an exception. + + """ + + if self._migration_context is None: + raise Exception("No context has been configured yet.") + return self._migration_context + + def get_bind(self): + """Return the current 'bind'. + + In "online" mode, this is the + :class:`sqlalchemy.engine.Connection` currently being used + to emit SQL to the database. + + This function requires that a :class:`.MigrationContext` + has first been made available via :meth:`.configure`. + + """ + return self.get_context().bind + + def get_impl(self): + return self.get_context().impl diff --git a/py3/lib/python3.6/site-packages/alembic/runtime/migration.py b/py3/lib/python3.6/site-packages/alembic/runtime/migration.py new file mode 100644 index 0000000..b3a9863 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/runtime/migration.py @@ -0,0 +1,1066 @@ +from contextlib import contextmanager +import logging +import sys + +from sqlalchemy import Column +from sqlalchemy import literal_column +from sqlalchemy import MetaData +from sqlalchemy import PrimaryKeyConstraint +from sqlalchemy import String +from sqlalchemy import Table +from sqlalchemy.engine import Connection +from sqlalchemy.engine import url as sqla_url +from sqlalchemy.engine.strategies import MockEngineStrategy + +from .. import ddl +from .. import util +from ..util.compat import callable +from ..util.compat import EncodedIO + +log = logging.getLogger(__name__) + + +class MigrationContext(object): + + """Represent the database state made available to a migration + script. + + :class:`.MigrationContext` is the front end to an actual + database connection, or alternatively a string output + stream given a particular database dialect, + from an Alembic perspective. 
+ + When inside the ``env.py`` script, the :class:`.MigrationContext` + is available via the + :meth:`.EnvironmentContext.get_context` method, + which is available at ``alembic.context``:: + + # from within env.py script + from alembic import context + migration_context = context.get_context() + + For usage outside of an ``env.py`` script, such as for + utility routines that want to check the current version + in the database, the :meth:`.MigrationContext.configure` + method to create new :class:`.MigrationContext` objects. + For example, to get at the current revision in the + database using :meth:`.MigrationContext.get_current_revision`:: + + # in any application, outside of an env.py script + from alembic.migration import MigrationContext + from sqlalchemy import create_engine + + engine = create_engine("postgresql://mydatabase") + conn = engine.connect() + + context = MigrationContext.configure(conn) + current_rev = context.get_current_revision() + + The above context can also be used to produce + Alembic migration operations with an :class:`.Operations` + instance:: + + # in any application, outside of the normal Alembic environment + from alembic.operations import Operations + op = Operations(context) + op.alter_column("mytable", "somecolumn", nullable=True) + + """ + + def __init__(self, dialect, connection, opts, environment_context=None): + self.environment_context = environment_context + self.opts = opts + self.dialect = dialect + self.script = opts.get("script") + as_sql = opts.get("as_sql", False) + transactional_ddl = opts.get("transactional_ddl") + self._transaction_per_migration = opts.get( + "transaction_per_migration", False + ) + self.on_version_apply_callbacks = opts.get("on_version_apply", ()) + + if as_sql: + self.connection = self._stdout_connection(connection) + assert self.connection is not None + else: + self.connection = connection + self._migrations_fn = opts.get("fn") + self.as_sql = as_sql + + if "output_encoding" in opts: + self.output_buffer = EncodedIO( + opts.get("output_buffer") or sys.stdout, + opts["output_encoding"], + ) + else: + self.output_buffer = opts.get("output_buffer", sys.stdout) + + self._user_compare_type = opts.get("compare_type", False) + self._user_compare_server_default = opts.get( + "compare_server_default", False + ) + self.version_table = version_table = opts.get( + "version_table", "alembic_version" + ) + self.version_table_schema = version_table_schema = opts.get( + "version_table_schema", None + ) + self._version = Table( + version_table, + MetaData(), + Column("version_num", String(32), nullable=False), + schema=version_table_schema, + ) + if opts.get("version_table_pk", True): + self._version.append_constraint( + PrimaryKeyConstraint( + "version_num", name="%s_pkc" % version_table + ) + ) + + self._start_from_rev = opts.get("starting_rev") + self.impl = ddl.DefaultImpl.get_by_dialect(dialect)( + dialect, + self.connection, + self.as_sql, + transactional_ddl, + self.output_buffer, + opts, + ) + log.info("Context impl %s.", self.impl.__class__.__name__) + if self.as_sql: + log.info("Generating static SQL") + log.info( + "Will assume %s DDL.", + "transactional" + if self.impl.transactional_ddl + else "non-transactional", + ) + + @classmethod + def configure( + cls, + connection=None, + url=None, + dialect_name=None, + dialect=None, + environment_context=None, + opts=None, + ): + """Create a new :class:`.MigrationContext`. + + This is a factory method usually called + by :meth:`.EnvironmentContext.configure`. 
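Extending the standalone example above to branched histories: ``get_current_heads()`` (described below) can be compared against the script directory's heads to answer "is this database current?". A sketch; the ini path and database URL are placeholders::

    from alembic.config import Config
    from alembic.script import ScriptDirectory
    from alembic.migration import MigrationContext
    from sqlalchemy import create_engine

    config = Config("alembic.ini")              # placeholder ini location
    script = ScriptDirectory.from_config(config)

    engine = create_engine("postgresql://user:pass@localhost/mydb")  # placeholder
    with engine.connect() as connection:
        context = MigrationContext.configure(connection)
        db_heads = set(context.get_current_heads())

    is_current = db_heads == set(script.get_heads())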
+ + :param connection: a :class:`~sqlalchemy.engine.Connection` + to use for SQL execution in "online" mode. When present, + is also used to determine the type of dialect in use. + :param url: a string database url, or a + :class:`sqlalchemy.engine.url.URL` object. + The type of dialect to be used will be derived from this if + ``connection`` is not passed. + :param dialect_name: string name of a dialect, such as + "postgresql", "mssql", etc. The type of dialect to be used will be + derived from this if ``connection`` and ``url`` are not passed. + :param opts: dictionary of options. Most other options + accepted by :meth:`.EnvironmentContext.configure` are passed via + this dictionary. + + """ + if opts is None: + opts = {} + + if connection: + if not isinstance(connection, Connection): + util.warn( + "'connection' argument to configure() is expected " + "to be a sqlalchemy.engine.Connection instance, " + "got %r" % connection, + stacklevel=3 + ) + dialect = connection.dialect + elif url: + url = sqla_url.make_url(url) + dialect = url.get_dialect()() + elif dialect_name: + url = sqla_url.make_url("%s://" % dialect_name) + dialect = url.get_dialect()() + elif not dialect: + raise Exception("Connection, url, or dialect_name is required.") + + return MigrationContext(dialect, connection, opts, environment_context) + + def begin_transaction(self, _per_migration=False): + transaction_now = _per_migration == self._transaction_per_migration + + if not transaction_now: + + @contextmanager + def do_nothing(): + yield + + return do_nothing() + + elif not self.impl.transactional_ddl: + + @contextmanager + def do_nothing(): + yield + + return do_nothing() + elif self.as_sql: + + @contextmanager + def begin_commit(): + self.impl.emit_begin() + yield + self.impl.emit_commit() + + return begin_commit() + else: + return self.bind.begin() + + def get_current_revision(self): + """Return the current revision, usually that which is present + in the ``alembic_version`` table in the database. + + This method intends to be used only for a migration stream that + does not contain unmerged branches in the target database; + if there are multiple branches present, an exception is raised. + The :meth:`.MigrationContext.get_current_heads` should be preferred + over this method going forward in order to be compatible with + branch migration support. + + If this :class:`.MigrationContext` was configured in "offline" + mode, that is with ``as_sql=True``, the ``starting_rev`` + parameter is returned instead, if any. + + """ + heads = self.get_current_heads() + if len(heads) == 0: + return None + elif len(heads) > 1: + raise util.CommandError( + "Version table '%s' has more than one head present; " + "please use get_current_heads()" % self.version_table + ) + else: + return heads[0] + + def get_current_heads(self): + """Return a tuple of the current 'head versions' that are represented + in the target database. + + For a migration stream without branches, this will be a single + value, synonymous with that of + :meth:`.MigrationContext.get_current_revision`. However when multiple + unmerged branches exist within the target database, the returned tuple + will contain a value for each head. + + If this :class:`.MigrationContext` was configured in "offline" + mode, that is with ``as_sql=True``, the ``starting_rev`` + parameter is returned in a one-length tuple. + + If no version table is present, or if there are no revisions + present, an empty tuple is returned. + + .. 
versionadded:: 0.7.0 + + """ + if self.as_sql: + start_from_rev = self._start_from_rev + if start_from_rev == "base": + start_from_rev = None + elif start_from_rev is not None and self.script: + start_from_rev = self.script.get_revision( + start_from_rev + ).revision + + return util.to_tuple(start_from_rev, default=()) + else: + if self._start_from_rev: + raise util.CommandError( + "Can't specify current_rev to context " + "when using a database connection" + ) + if not self._has_version_table(): + return () + return tuple( + row[0] for row in self.connection.execute(self._version.select()) + ) + + def _ensure_version_table(self): + self._version.create(self.connection, checkfirst=True) + + def _has_version_table(self): + return self.connection.dialect.has_table( + self.connection, self.version_table, self.version_table_schema + ) + + def stamp(self, script_directory, revision): + """Stamp the version table with a specific revision. + + This method calculates those branches to which the given revision + can apply, and updates those branches as though they were migrated + towards that revision (either up or down). If no current branches + include the revision, it is added as a new branch head. + + .. versionadded:: 0.7.0 + + """ + heads = self.get_current_heads() + if not self.as_sql and not heads: + self._ensure_version_table() + head_maintainer = HeadMaintainer(self, heads) + for step in script_directory._stamp_revs(revision, heads): + head_maintainer.update_to_step(step) + + def run_migrations(self, **kw): + r"""Run the migration scripts established for this + :class:`.MigrationContext`, if any. + + The commands in :mod:`alembic.command` will set up a function + that is ultimately passed to the :class:`.MigrationContext` + as the ``fn`` argument. This function represents the "work" + that will be done when :meth:`.MigrationContext.run_migrations` + is called, typically from within the ``env.py`` script of the + migration environment. The "work function" then provides an iterable + of version callables and other version information which + in the case of the ``upgrade`` or ``downgrade`` commands are the + list of version scripts to invoke. Other commands yield nothing, + in the case that a command wants to run some other operation + against the database such as the ``current`` or ``stamp`` commands. + + :param \**kw: keyword arguments here will be passed to each + migration callable, that is the ``upgrade()`` or ``downgrade()`` + method within revision scripts. 
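This ``**kw`` pass-through is what the multi-database layouts build on. A sketch, assuming ``script.py.mako`` has been edited so ``upgrade()``/``downgrade()`` take an ``engine_name`` argument (the name is illustrative)::

    # env.py
    with context.begin_transaction():
        context.run_migrations(engine_name="reporting")

    # a revision script generated from the edited script.py.mako
    def upgrade(engine_name):
        if engine_name == "reporting":
            op.create_index("ix_report_ts", "report", ["ts"])

    def downgrade(engine_name):
        if engine_name == "reporting":
            op.drop_index("ix_report_ts", table_name="report")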
+ + """ + self.impl.start_migrations() + + heads = self.get_current_heads() + if not self.as_sql and not heads: + self._ensure_version_table() + + head_maintainer = HeadMaintainer(self, heads) + + starting_in_transaction = ( + not self.as_sql and self._in_connection_transaction() + ) + + for step in self._migrations_fn(heads, self): + with self.begin_transaction(_per_migration=True): + if self.as_sql and not head_maintainer.heads: + # for offline mode, include a CREATE TABLE from + # the base + self._version.create(self.connection) + log.info("Running %s", step) + if self.as_sql: + self.impl.static_output( + "-- Running %s" % (step.short_log,) + ) + step.migration_fn(**kw) + + # previously, we wouldn't stamp per migration + # if we were in a transaction, however given the more + # complex model that involves any number of inserts + # and row-targeted updates and deletes, it's simpler for now + # just to run the operations on every version + head_maintainer.update_to_step(step) + for callback in self.on_version_apply_callbacks: + callback( + ctx=self, + step=step.info, + heads=set(head_maintainer.heads), + run_args=kw, + ) + + if ( + not starting_in_transaction + and not self.as_sql + and not self.impl.transactional_ddl + and self._in_connection_transaction() + ): + raise util.CommandError( + 'Migration "%s" has left an uncommitted ' + "transaction opened; transactional_ddl is False so " + "Alembic is not committing transactions" % step + ) + + if self.as_sql and not head_maintainer.heads: + self._version.drop(self.connection) + + def _in_connection_transaction(self): + try: + meth = self.connection.in_transaction + except AttributeError: + return False + else: + return meth() + + def execute(self, sql, execution_options=None): + """Execute a SQL construct or string statement. + + The underlying execution mechanics are used, that is + if this is "offline mode" the SQL is written to the + output buffer, otherwise the SQL is emitted on + the current SQLAlchemy connection. + + """ + self.impl._exec(sql, execution_options) + + def _stdout_connection(self, connection): + def dump(construct, *multiparams, **params): + self.impl._exec(construct) + + return MockEngineStrategy.MockConnection(self.dialect, dump) + + @property + def bind(self): + """Return the current "bind". + + In online mode, this is an instance of + :class:`sqlalchemy.engine.Connection`, and is suitable + for ad-hoc execution of any kind of usage described + in :ref:`sqlexpression_toplevel` as well as + for usage with the :meth:`sqlalchemy.schema.Table.create` + and :meth:`sqlalchemy.schema.MetaData.create_all` methods + of :class:`~sqlalchemy.schema.Table`, + :class:`~sqlalchemy.schema.MetaData`. + + Note that when "standard output" mode is enabled, + this bind will be a "mock" connection handler that cannot + return results and is only appropriate for a very limited + subset of commands. + + """ + return self.connection + + @property + def config(self): + """Return the :class:`.Config` used by the current environment, if any. + + .. 
versionadded:: 0.6.6 + + """ + if self.environment_context: + return self.environment_context.config + else: + return None + + def _compare_type(self, inspector_column, metadata_column): + if self._user_compare_type is False: + return False + + if callable(self._user_compare_type): + user_value = self._user_compare_type( + self, + inspector_column, + metadata_column, + inspector_column.type, + metadata_column.type, + ) + if user_value is not None: + return user_value + + return self.impl.compare_type(inspector_column, metadata_column) + + def _compare_server_default( + self, + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_column_default, + ): + + if self._user_compare_server_default is False: + return False + + if callable(self._user_compare_server_default): + user_value = self._user_compare_server_default( + self, + inspector_column, + metadata_column, + rendered_column_default, + metadata_column.server_default, + rendered_metadata_default, + ) + if user_value is not None: + return user_value + + return self.impl.compare_server_default( + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_column_default, + ) + + +class HeadMaintainer(object): + def __init__(self, context, heads): + self.context = context + self.heads = set(heads) + + def _insert_version(self, version): + assert version not in self.heads + self.heads.add(version) + + self.context.impl._exec( + self.context._version.insert().values( + version_num=literal_column("'%s'" % version) + ) + ) + + def _delete_version(self, version): + self.heads.remove(version) + + ret = self.context.impl._exec( + self.context._version.delete().where( + self.context._version.c.version_num + == literal_column("'%s'" % version) + ) + ) + if not self.context.as_sql and ret.rowcount != 1: + raise util.CommandError( + "Online migration expected to match one " + "row when deleting '%s' in '%s'; " + "%d found" + % (version, self.context.version_table, ret.rowcount) + ) + + def _update_version(self, from_, to_): + assert to_ not in self.heads + self.heads.remove(from_) + self.heads.add(to_) + + ret = self.context.impl._exec( + self.context._version.update() + .values(version_num=literal_column("'%s'" % to_)) + .where( + self.context._version.c.version_num + == literal_column("'%s'" % from_) + ) + ) + if not self.context.as_sql and ret.rowcount != 1: + raise util.CommandError( + "Online migration expected to match one " + "row when updating '%s' to '%s' in '%s'; " + "%d found" + % (from_, to_, self.context.version_table, ret.rowcount) + ) + + def update_to_step(self, step): + if step.should_delete_branch(self.heads): + vers = step.delete_version_num + log.debug("branch delete %s", vers) + self._delete_version(vers) + elif step.should_create_branch(self.heads): + vers = step.insert_version_num + log.debug("new branch insert %s", vers) + self._insert_version(vers) + elif step.should_merge_branches(self.heads): + # delete revs, update from rev, update to rev + ( + delete_revs, + update_from_rev, + update_to_rev, + ) = step.merge_branch_idents(self.heads) + log.debug( + "merge, delete %s, update %s to %s", + delete_revs, + update_from_rev, + update_to_rev, + ) + for delrev in delete_revs: + self._delete_version(delrev) + self._update_version(update_from_rev, update_to_rev) + elif step.should_unmerge_branches(self.heads): + ( + update_from_rev, + update_to_rev, + insert_revs, + ) = step.unmerge_branch_idents(self.heads) + log.debug( + "unmerge, insert %s, update %s to %s", + insert_revs, + 
update_from_rev, + update_to_rev, + ) + for insrev in insert_revs: + self._insert_version(insrev) + self._update_version(update_from_rev, update_to_rev) + else: + from_, to_ = step.update_version_num(self.heads) + log.debug("update %s to %s", from_, to_) + self._update_version(from_, to_) + + +class MigrationInfo(object): + """Exposes information about a migration step to a callback listener. + + The :class:`.MigrationInfo` object is available exclusively for the + benefit of the :paramref:`.EnvironmentContext.on_version_apply` + callback hook. + + .. versionadded:: 0.9.3 + + """ + + is_upgrade = None + """True/False: indicates whether this operation ascends or descends the + version tree.""" + + is_stamp = None + """True/False: indicates whether this operation is a stamp (i.e. whether + it results in any actual database operations).""" + + up_revision_id = None + """Version string corresponding to :attr:`.Revision.revision`. + + In the case of a stamp operation, it is advised to use the + :attr:`.MigrationInfo.up_revision_ids` tuple as a stamp operation can + make a single movement from one or more branches down to a single + branchpoint, in which case there will be multiple "up" revisions. + + .. seealso:: + + :attr:`.MigrationInfo.up_revision_ids` + + """ + + up_revision_ids = None + """Tuple of version strings corresponding to :attr:`.Revision.revision`. + + In the majority of cases, this tuple will be a single value, synonomous + with the scalar value of :attr:`.MigrationInfo.up_revision_id`. + It can be multiple revision identifiers only in the case of an + ``alembic stamp`` operation which is moving downwards from multiple + branches down to their common branch point. + + .. versionadded:: 0.9.4 + + """ + + down_revision_ids = None + """Tuple of strings representing the base revisions of this migration step. + + If empty, this represents a root revision; otherwise, the first item + corresponds to :attr:`.Revision.down_revision`, and the rest are inferred + from dependencies. + """ + + revision_map = None + """The revision map inside of which this operation occurs.""" + + def __init__( + self, revision_map, is_upgrade, is_stamp, up_revisions, down_revisions + ): + self.revision_map = revision_map + self.is_upgrade = is_upgrade + self.is_stamp = is_stamp + self.up_revision_ids = util.to_tuple(up_revisions, default=()) + if self.up_revision_ids: + self.up_revision_id = self.up_revision_ids[0] + else: + # this should never be the case with + # "upgrade", "downgrade", or "stamp" as we are always + # measuring movement in terms of at least one upgrade version + self.up_revision_id = None + self.down_revision_ids = util.to_tuple(down_revisions, default=()) + + @property + def is_migration(self): + """True/False: indicates whether this operation is a migration. + + At present this is true if and only the migration is not a stamp. + If other operation types are added in the future, both this attribute + and :attr:`~.MigrationInfo.is_stamp` will be false. + """ + return not self.is_stamp + + @property + def source_revision_ids(self): + """Active revisions before this migration step is applied.""" + return ( + self.down_revision_ids if self.is_upgrade else self.up_revision_ids + ) + + @property + def destination_revision_ids(self): + """Active revisions after this migration step is applied.""" + return ( + self.up_revision_ids if self.is_upgrade else self.down_revision_ids + ) + + @property + def up_revision(self): + """Get :attr:`~.MigrationInfo.up_revision_id` as + a :class:`.Revision`. 
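The attributes above are what an ``on_version_apply`` hook (see ``configure()`` earlier) receives as its ``step`` argument. A sketch of a simple audit-logging callback; the logger name is illustrative::

    import logging

    audit_log = logging.getLogger("alembic.audit")

    def log_version_apply(ctx, step, heads, run_args, **extra):
        # 'step' is the MigrationInfo described above
        audit_log.info(
            "%s %s -> %s (heads now: %s)",
            "upgrade" if step.is_upgrade else "downgrade",
            step.source_revision_ids,
            step.destination_revision_ids,
            sorted(heads),
        )

    context.configure(
        # ...
        on_version_apply=[log_version_apply],
    )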
+ + """ + return self.revision_map.get_revision(self.up_revision_id) + + @property + def up_revisions(self): + """Get :attr:`~.MigrationInfo.up_revision_ids` as a :class:`.Revision`. + + .. versionadded:: 0.9.4 + + """ + return self.revision_map.get_revisions(self.up_revision_ids) + + @property + def down_revisions(self): + """Get :attr:`~.MigrationInfo.down_revision_ids` as a tuple of + :class:`Revisions <.Revision>`.""" + return self.revision_map.get_revisions(self.down_revision_ids) + + @property + def source_revisions(self): + """Get :attr:`~MigrationInfo.source_revision_ids` as a tuple of + :class:`Revisions <.Revision>`.""" + return self.revision_map.get_revisions(self.source_revision_ids) + + @property + def destination_revisions(self): + """Get :attr:`~MigrationInfo.destination_revision_ids` as a tuple of + :class:`Revisions <.Revision>`.""" + return self.revision_map.get_revisions(self.destination_revision_ids) + + +class MigrationStep(object): + @property + def name(self): + return self.migration_fn.__name__ + + @classmethod + def upgrade_from_script(cls, revision_map, script): + return RevisionStep(revision_map, script, True) + + @classmethod + def downgrade_from_script(cls, revision_map, script): + return RevisionStep(revision_map, script, False) + + @property + def is_downgrade(self): + return not self.is_upgrade + + @property + def short_log(self): + return "%s %s -> %s" % ( + self.name, + util.format_as_comma(self.from_revisions_no_deps), + util.format_as_comma(self.to_revisions_no_deps), + ) + + def __str__(self): + if self.doc: + return "%s %s -> %s, %s" % ( + self.name, + util.format_as_comma(self.from_revisions_no_deps), + util.format_as_comma(self.to_revisions_no_deps), + self.doc, + ) + else: + return self.short_log + + +class RevisionStep(MigrationStep): + def __init__(self, revision_map, revision, is_upgrade): + self.revision_map = revision_map + self.revision = revision + self.is_upgrade = is_upgrade + if is_upgrade: + self.migration_fn = revision.module.upgrade + else: + self.migration_fn = revision.module.downgrade + + def __repr__(self): + return "RevisionStep(%r, is_upgrade=%r)" % ( + self.revision.revision, + self.is_upgrade, + ) + + def __eq__(self, other): + return ( + isinstance(other, RevisionStep) + and other.revision == self.revision + and self.is_upgrade == other.is_upgrade + ) + + @property + def doc(self): + return self.revision.doc + + @property + def from_revisions(self): + if self.is_upgrade: + return self.revision._all_down_revisions + else: + return (self.revision.revision,) + + @property + def from_revisions_no_deps(self): + if self.is_upgrade: + return self.revision._versioned_down_revisions + else: + return (self.revision.revision,) + + @property + def to_revisions(self): + if self.is_upgrade: + return (self.revision.revision,) + else: + return self.revision._all_down_revisions + + @property + def to_revisions_no_deps(self): + if self.is_upgrade: + return (self.revision.revision,) + else: + return self.revision._versioned_down_revisions + + @property + def _has_scalar_down_revision(self): + return len(self.revision._all_down_revisions) == 1 + + def should_delete_branch(self, heads): + """A delete is when we are a. in a downgrade and b. + we are going to the "base" or we are going to a version that + is implied as a dependency on another version that is remaining. 
+ + """ + if not self.is_downgrade: + return False + + if self.revision.revision not in heads: + return False + + downrevs = self.revision._all_down_revisions + + if not downrevs: + # is a base + return True + else: + # determine what the ultimate "to_revisions" for an + # unmerge would be. If there are none, then we're a delete. + to_revisions = self._unmerge_to_revisions(heads) + return not to_revisions + + def merge_branch_idents(self, heads): + other_heads = set(heads).difference(self.from_revisions) + + if other_heads: + ancestors = set( + r.revision + for r in self.revision_map._get_ancestor_nodes( + self.revision_map.get_revisions(other_heads), check=False + ) + ) + from_revisions = list( + set(self.from_revisions).difference(ancestors) + ) + else: + from_revisions = list(self.from_revisions) + + return ( + # delete revs, update from rev, update to rev + list(from_revisions[0:-1]), + from_revisions[-1], + self.to_revisions[0], + ) + + def _unmerge_to_revisions(self, heads): + other_heads = set(heads).difference([self.revision.revision]) + if other_heads: + ancestors = set( + r.revision + for r in self.revision_map._get_ancestor_nodes( + self.revision_map.get_revisions(other_heads), check=False + ) + ) + return list(set(self.to_revisions).difference(ancestors)) + else: + return self.to_revisions + + def unmerge_branch_idents(self, heads): + to_revisions = self._unmerge_to_revisions(heads) + + return ( + # update from rev, update to rev, insert revs + self.from_revisions[0], + to_revisions[-1], + to_revisions[0:-1], + ) + + def should_create_branch(self, heads): + if not self.is_upgrade: + return False + + downrevs = self.revision._all_down_revisions + + if not downrevs: + # is a base + return True + else: + # none of our downrevs are present, so... + # we have to insert our version. This is true whether + # or not there is only one downrev, or multiple (in the latter + # case, we're a merge point.) 
+ if not heads.intersection(downrevs): + return True + else: + return False + + def should_merge_branches(self, heads): + if not self.is_upgrade: + return False + + downrevs = self.revision._all_down_revisions + + if len(downrevs) > 1 and len(heads.intersection(downrevs)) > 1: + return True + + return False + + def should_unmerge_branches(self, heads): + if not self.is_downgrade: + return False + + downrevs = self.revision._all_down_revisions + + if self.revision.revision in heads and len(downrevs) > 1: + return True + + return False + + def update_version_num(self, heads): + if not self._has_scalar_down_revision: + downrev = heads.intersection(self.revision._all_down_revisions) + assert ( + len(downrev) == 1 + ), "Can't do an UPDATE because downrevision is ambiguous" + down_revision = list(downrev)[0] + else: + down_revision = self.revision._all_down_revisions[0] + + if self.is_upgrade: + return down_revision, self.revision.revision + else: + return self.revision.revision, down_revision + + @property + def delete_version_num(self): + return self.revision.revision + + @property + def insert_version_num(self): + return self.revision.revision + + @property + def info(self): + return MigrationInfo( + revision_map=self.revision_map, + up_revisions=self.revision.revision, + down_revisions=self.revision._all_down_revisions, + is_upgrade=self.is_upgrade, + is_stamp=False, + ) + + +class StampStep(MigrationStep): + def __init__(self, from_, to_, is_upgrade, branch_move, revision_map=None): + self.from_ = util.to_tuple(from_, default=()) + self.to_ = util.to_tuple(to_, default=()) + self.is_upgrade = is_upgrade + self.branch_move = branch_move + self.migration_fn = self.stamp_revision + self.revision_map = revision_map + + doc = None + + def stamp_revision(self, **kw): + return None + + def __eq__(self, other): + return ( + isinstance(other, StampStep) + and other.from_revisions == self.revisions + and other.to_revisions == self.to_revisions + and other.branch_move == self.branch_move + and self.is_upgrade == other.is_upgrade + ) + + @property + def from_revisions(self): + return self.from_ + + @property + def to_revisions(self): + return self.to_ + + @property + def from_revisions_no_deps(self): + return self.from_ + + @property + def to_revisions_no_deps(self): + return self.to_ + + @property + def delete_version_num(self): + assert len(self.from_) == 1 + return self.from_[0] + + @property + def insert_version_num(self): + assert len(self.to_) == 1 + return self.to_[0] + + def update_version_num(self, heads): + assert len(self.from_) == 1 + assert len(self.to_) == 1 + return self.from_[0], self.to_[0] + + def merge_branch_idents(self, heads): + return ( + # delete revs, update from rev, update to rev + list(self.from_[0:-1]), + self.from_[-1], + self.to_[0], + ) + + def unmerge_branch_idents(self, heads): + return ( + # update from rev, update to rev, insert revs + self.from_[0], + self.to_[-1], + list(self.to_[0:-1]), + ) + + def should_delete_branch(self, heads): + return self.is_downgrade and self.branch_move + + def should_create_branch(self, heads): + return self.is_upgrade and self.branch_move + + def should_merge_branches(self, heads): + return len(self.from_) > 1 + + def should_unmerge_branches(self, heads): + return len(self.to_) > 1 + + @property + def info(self): + up, down = ( + (self.to_, self.from_) + if self.is_upgrade + else (self.from_, self.to_) + ) + return MigrationInfo( + revision_map=self.revision_map, + up_revisions=up, + down_revisions=down, + is_upgrade=self.is_upgrade, + 
is_stamp=True, + ) diff --git a/py3/lib/python3.6/site-packages/alembic/script/__init__.py b/py3/lib/python3.6/site-packages/alembic/script/__init__.py new file mode 100644 index 0000000..540d627 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/script/__init__.py @@ -0,0 +1,4 @@ +from .base import Script # noqa +from .base import ScriptDirectory # noqa + +__all__ = ["ScriptDirectory", "Script"] diff --git a/py3/lib/python3.6/site-packages/alembic/script/base.py b/py3/lib/python3.6/site-packages/alembic/script/base.py new file mode 100644 index 0000000..b386dea --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/script/base.py @@ -0,0 +1,905 @@ +from contextlib import contextmanager +import datetime +import os +import re +import shutil + +from dateutil import tz + +from . import revision +from .. import util +from ..runtime import migration +from ..util import compat + +_sourceless_rev_file = re.compile(r"(?!\.\#|__init__)(.*\.py)(c|o)?$") +_only_source_rev_file = re.compile(r"(?!\.\#|__init__)(.*\.py)$") +_legacy_rev = re.compile(r"([a-f0-9]+)\.py$") +_mod_def_re = re.compile(r"(upgrade|downgrade)_([a-z0-9]+)") +_slug_re = re.compile(r"\w+") +_default_file_template = "%(rev)s_%(slug)s" +_split_on_space_comma = re.compile(r",|(?: +)") + + +class ScriptDirectory(object): + + """Provides operations upon an Alembic script directory. + + This object is useful to get information as to current revisions, + most notably being able to get at the "head" revision, for schemes + that want to test if the current revision in the database is the most + recent:: + + from alembic.script import ScriptDirectory + from alembic.config import Config + config = Config() + config.set_main_option("script_location", "myapp:migrations") + script = ScriptDirectory.from_config(config) + + head_revision = script.get_current_head() + + + + """ + + def __init__( + self, + dir, # noqa + file_template=_default_file_template, + truncate_slug_length=40, + version_locations=None, + sourceless=False, + output_encoding="utf-8", + timezone=None, + ): + self.dir = dir + self.file_template = file_template + self.version_locations = version_locations + self.truncate_slug_length = truncate_slug_length or 40 + self.sourceless = sourceless + self.output_encoding = output_encoding + self.revision_map = revision.RevisionMap(self._load_revisions) + self.timezone = timezone + + if not os.access(dir, os.F_OK): + raise util.CommandError( + "Path doesn't exist: %r. Please use " + "the 'init' command to create a new " + "scripts folder." % dir + ) + + @property + def versions(self): + loc = self._version_locations + if len(loc) > 1: + raise util.CommandError("Multiple version_locations present") + else: + return loc[0] + + @util.memoized_property + def _version_locations(self): + if self.version_locations: + return [ + os.path.abspath(util.coerce_resource_to_filename(location)) + for location in self.version_locations + ] + else: + return (os.path.abspath(os.path.join(self.dir, "versions")),) + + def _load_revisions(self): + if self.version_locations: + paths = [ + vers + for vers in self._version_locations + if os.path.exists(vers) + ] + else: + paths = [self.versions] + + dupes = set() + for vers in paths: + for file_ in Script._list_py_dir(self, vers): + path = os.path.realpath(os.path.join(vers, file_)) + if path in dupes: + util.warn( + "File %s loaded twice! ignoring. Please ensure " + "version_locations is unique." 
% path + ) + continue + dupes.add(path) + script = Script._from_filename(self, vers, file_) + if script is None: + continue + yield script + + @classmethod + def from_config(cls, config): + """Produce a new :class:`.ScriptDirectory` given a :class:`.Config` + instance. + + The :class:`.Config` need only have the ``script_location`` key + present. + + """ + script_location = config.get_main_option("script_location") + if script_location is None: + raise util.CommandError( + "No 'script_location' key " "found in configuration." + ) + truncate_slug_length = config.get_main_option("truncate_slug_length") + if truncate_slug_length is not None: + truncate_slug_length = int(truncate_slug_length) + + version_locations = config.get_main_option("version_locations") + if version_locations: + version_locations = _split_on_space_comma.split(version_locations) + + return ScriptDirectory( + util.coerce_resource_to_filename(script_location), + file_template=config.get_main_option( + "file_template", _default_file_template + ), + truncate_slug_length=truncate_slug_length, + sourceless=config.get_main_option("sourceless") == "true", + output_encoding=config.get_main_option("output_encoding", "utf-8"), + version_locations=version_locations, + timezone=config.get_main_option("timezone"), + ) + + @contextmanager + def _catch_revision_errors( + self, + ancestor=None, + multiple_heads=None, + start=None, + end=None, + resolution=None, + ): + try: + yield + except revision.RangeNotAncestorError as rna: + if start is None: + start = rna.lower + if end is None: + end = rna.upper + if not ancestor: + ancestor = ( + "Requested range %(start)s:%(end)s does not refer to " + "ancestor/descendant revisions along the same branch" + ) + ancestor = ancestor % {"start": start, "end": end} + compat.raise_from_cause(util.CommandError(ancestor)) + except revision.MultipleHeads as mh: + if not multiple_heads: + multiple_heads = ( + "Multiple head revisions are present for given " + "argument '%(head_arg)s'; please " + "specify a specific target revision, " + "'@%(head_arg)s' to " + "narrow to a specific head, or 'heads' for all heads" + ) + multiple_heads = multiple_heads % { + "head_arg": end or mh.argument, + "heads": util.format_as_comma(mh.heads), + } + compat.raise_from_cause(util.CommandError(multiple_heads)) + except revision.ResolutionError as re: + if resolution is None: + resolution = "Can't locate revision identified by '%s'" % ( + re.argument + ) + compat.raise_from_cause(util.CommandError(resolution)) + except revision.RevisionError as err: + compat.raise_from_cause(util.CommandError(err.args[0])) + + def walk_revisions(self, base="base", head="heads"): + """Iterate through all revisions. + + :param base: the base revision, or "base" to start from the + empty revision. + + :param head: the head revision; defaults to "heads" to indicate + all head revisions. May also be "head" to indicate a single + head revision. + + .. versionchanged:: 0.7.0 the "head" identifier now refers to + the head of a non-branched repository only; use "heads" to + refer to the set of all head branches simultaneously. + + """ + with self._catch_revision_errors(start=base, end=head): + for rev in self.revision_map.iterate_revisions( + head, base, inclusive=True, assert_relative_length=False + ): + yield rev + + def get_revisions(self, id_): + """Return the :class:`.Script` instance with the given rev identifier, + symbolic name, or sequence of identifiers. + + .. 
versionadded:: 0.7.0 + + """ + with self._catch_revision_errors(): + return self.revision_map.get_revisions(id_) + + def get_all_current(self, id_): + with self._catch_revision_errors(): + top_revs = set(self.revision_map.get_revisions(id_)) + top_revs.update( + self.revision_map._get_ancestor_nodes( + list(top_revs), include_dependencies=True + ) + ) + top_revs = self.revision_map._filter_into_branch_heads(top_revs) + return top_revs + + def get_revision(self, id_): + """Return the :class:`.Script` instance with the given rev id. + + .. seealso:: + + :meth:`.ScriptDirectory.get_revisions` + + """ + + with self._catch_revision_errors(): + return self.revision_map.get_revision(id_) + + def as_revision_number(self, id_): + """Convert a symbolic revision, i.e. 'head' or 'base', into + an actual revision number.""" + + with self._catch_revision_errors(): + rev, branch_name = self.revision_map._resolve_revision_number(id_) + + if not rev: + # convert () to None + return None + elif id_ == "heads": + return rev + else: + return rev[0] + + def iterate_revisions(self, upper, lower): + """Iterate through script revisions, starting at the given + upper revision identifier and ending at the lower. + + The traversal uses strictly the `down_revision` + marker inside each migration script, so + it is a requirement that upper >= lower, + else you'll get nothing back. + + The iterator yields :class:`.Script` objects. + + .. seealso:: + + :meth:`.RevisionMap.iterate_revisions` + + """ + return self.revision_map.iterate_revisions(upper, lower) + + def get_current_head(self): + """Return the current head revision. + + If the script directory has multiple heads + due to branching, an error is raised; + :meth:`.ScriptDirectory.get_heads` should be + preferred. + + :return: a string revision number. + + .. seealso:: + + :meth:`.ScriptDirectory.get_heads` + + """ + with self._catch_revision_errors( + multiple_heads=( + "The script directory has multiple heads (due to branching)." + "Please use get_heads(), or merge the branches using " + "alembic merge." + ) + ): + return self.revision_map.get_current_head() + + def get_heads(self): + """Return all "versioned head" revisions as strings. + + This is normally a list of length one, + unless branches are present. The + :meth:`.ScriptDirectory.get_current_head()` method + can be used normally when a script directory + has only one head. + + :return: a tuple of string revision numbers. + """ + return list(self.revision_map.heads) + + def get_base(self): + """Return the "base" revision as a string. + + This is the revision number of the script that + has a ``down_revision`` of None. + + If the script directory has multiple bases, an error is raised; + :meth:`.ScriptDirectory.get_bases` should be + preferred. + + """ + bases = self.get_bases() + if len(bases) > 1: + raise util.CommandError( + "The script directory has multiple bases. " + "Please use get_bases()." + ) + elif bases: + return bases[0] + else: + return None + + def get_bases(self): + """return all "base" revisions as strings. + + This is the revision number of all scripts that + have a ``down_revision`` of None. + + .. 
versionadded:: 0.7.0 + + """ + return list(self.revision_map.bases) + + def _upgrade_revs(self, destination, current_rev): + with self._catch_revision_errors( + ancestor="Destination %(end)s is not a valid upgrade " + "target from current head(s)", + end=destination, + ): + revs = self.revision_map.iterate_revisions( + destination, current_rev, implicit_base=True + ) + revs = list(revs) + return [ + migration.MigrationStep.upgrade_from_script( + self.revision_map, script + ) + for script in reversed(list(revs)) + ] + + def _downgrade_revs(self, destination, current_rev): + with self._catch_revision_errors( + ancestor="Destination %(end)s is not a valid downgrade " + "target from current head(s)", + end=destination, + ): + revs = self.revision_map.iterate_revisions( + current_rev, destination, select_for_downgrade=True + ) + return [ + migration.MigrationStep.downgrade_from_script( + self.revision_map, script + ) + for script in revs + ] + + def _stamp_revs(self, revision, heads): + with self._catch_revision_errors( + multiple_heads="Multiple heads are present; please specify a " + "single target revision" + ): + + heads = self.get_revisions(heads) + + # filter for lineage will resolve things like + # branchname@base, version@base, etc. + filtered_heads = self.revision_map.filter_for_lineage( + heads, revision, include_dependencies=True + ) + + steps = [] + + dests = self.get_revisions(revision) or [None] + for dest in dests: + if dest is None: + # dest is 'base'. Return a "delete branch" migration + # for all applicable heads. + steps.extend( + [ + migration.StampStep( + head.revision, + None, + False, + True, + self.revision_map, + ) + for head in filtered_heads + ] + ) + continue + elif dest in filtered_heads: + # the dest is already in the version table, do nothing. + continue + + # figure out if the dest is a descendant or an + # ancestor of the selected nodes + descendants = set( + self.revision_map._get_descendant_nodes([dest]) + ) + ancestors = set(self.revision_map._get_ancestor_nodes([dest])) + + if descendants.intersection(filtered_heads): + # heads are above the target, so this is a downgrade. + # we can treat them as a "merge", single step. + assert not ancestors.intersection(filtered_heads) + todo_heads = [head.revision for head in filtered_heads] + step = migration.StampStep( + todo_heads, + dest.revision, + False, + False, + self.revision_map, + ) + steps.append(step) + continue + elif ancestors.intersection(filtered_heads): + # heads are below the target, so this is an upgrade. + # we can treat them as a "merge", single step. + todo_heads = [head.revision for head in filtered_heads] + step = migration.StampStep( + todo_heads, + dest.revision, + True, + False, + self.revision_map, + ) + steps.append(step) + continue + else: + # destination is in a branch not represented, + # treat it as new branch + step = migration.StampStep( + (), dest.revision, True, True, self.revision_map + ) + steps.append(step) + continue + return steps + + def run_env(self): + """Run the script environment. + + This basically runs the ``env.py`` script present + in the migration environment. It is called exclusively + by the command functions in :mod:`alembic.command`. 
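
As a usage sketch for the ScriptDirectory API above, this is the common pattern for checking whether a database is at the newest revision, pairing get_current_head() with MigrationContext; the ini path and database URL are placeholders.

from sqlalchemy import create_engine

from alembic.config import Config
from alembic.runtime.migration import MigrationContext
from alembic.script import ScriptDirectory

cfg = Config("alembic.ini")                      # placeholder ini path
script = ScriptDirectory.from_config(cfg)

engine = create_engine("sqlite:///example.db")   # placeholder URL
with engine.connect() as conn:
    db_rev = MigrationContext.configure(conn).get_current_revision()

print("database at:", db_rev)
print("newest script:", script.get_current_head())
print("up to date:", db_rev == script.get_current_head())
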
+ + + """ + util.load_python_file(self.dir, "env.py") + + @property + def env_py_location(self): + return os.path.abspath(os.path.join(self.dir, "env.py")) + + def _generate_template(self, src, dest, **kw): + util.status( + "Generating %s" % os.path.abspath(dest), + util.template_to_file, + src, + dest, + self.output_encoding, + **kw + ) + + def _copy_file(self, src, dest): + util.status( + "Generating %s" % os.path.abspath(dest), shutil.copy, src, dest + ) + + def _ensure_directory(self, path): + path = os.path.abspath(path) + if not os.path.exists(path): + util.status("Creating directory %s" % path, os.makedirs, path) + + def _generate_create_date(self): + if self.timezone is not None: + # First, assume correct capitalization + tzinfo = tz.gettz(self.timezone) + if tzinfo is None: + # Fall back to uppercase + tzinfo = tz.gettz(self.timezone.upper()) + if tzinfo is None: + raise util.CommandError( + "Can't locate timezone: %s" % self.timezone + ) + create_date = ( + datetime.datetime.utcnow() + .replace(tzinfo=tz.tzutc()) + .astimezone(tzinfo) + ) + else: + create_date = datetime.datetime.now() + return create_date + + def generate_revision( + self, + revid, + message, + head=None, + refresh=False, + splice=False, + branch_labels=None, + version_path=None, + depends_on=None, + **kw + ): + """Generate a new revision file. + + This runs the ``script.py.mako`` template, given + template arguments, and creates a new file. + + :param revid: String revision id. Typically this + comes from ``alembic.util.rev_id()``. + :param message: the revision message, the one passed + by the -m argument to the ``revision`` command. + :param head: the head revision to generate against. Defaults + to the current "head" if no branches are present, else raises + an exception. + + .. versionadded:: 0.7.0 + + :param splice: if True, allow the "head" version to not be an + actual head; otherwise, the selected head must be a head + (e.g. endpoint) revision. + :param refresh: deprecated. + + """ + if head is None: + head = "head" + + try: + Script.verify_rev_id(revid) + except revision.RevisionError as err: + compat.raise_from_cause(util.CommandError(err.args[0])) + + with self._catch_revision_errors( + multiple_heads=( + "Multiple heads are present; please specify the head " + "revision on which the new revision should be based, " + "or perform a merge." 
+ ) + ): + heads = self.revision_map.get_revisions(head) + + if len(set(heads)) != len(heads): + raise util.CommandError("Duplicate head revisions specified") + + create_date = self._generate_create_date() + + if version_path is None: + if len(self._version_locations) > 1: + for head in heads: + if head is not None: + version_path = os.path.dirname(head.path) + break + else: + raise util.CommandError( + "Multiple version locations present, " + "please specify --version-path" + ) + else: + version_path = self.versions + + norm_path = os.path.normpath(os.path.abspath(version_path)) + for vers_path in self._version_locations: + if os.path.normpath(vers_path) == norm_path: + break + else: + raise util.CommandError( + "Path %s is not represented in current " + "version locations" % version_path + ) + + if self.version_locations: + self._ensure_directory(version_path) + + path = self._rev_path(version_path, revid, message, create_date) + + if not splice: + for head in heads: + if head is not None and not head.is_head: + raise util.CommandError( + "Revision %s is not a head revision; please specify " + "--splice to create a new branch from this revision" + % head.revision + ) + + if depends_on: + with self._catch_revision_errors(): + depends_on = [ + dep + if dep in rev.branch_labels # maintain branch labels + else rev.revision # resolve partial revision identifiers + for rev, dep in [ + (self.revision_map.get_revision(dep), dep) + for dep in util.to_list(depends_on) + ] + ] + + self._generate_template( + os.path.join(self.dir, "script.py.mako"), + path, + up_revision=str(revid), + down_revision=revision.tuple_rev_as_scalar( + tuple(h.revision if h is not None else None for h in heads) + ), + branch_labels=util.to_tuple(branch_labels), + depends_on=revision.tuple_rev_as_scalar(depends_on), + create_date=create_date, + comma=util.format_as_comma, + message=message if message is not None else ("empty message"), + **kw + ) + try: + script = Script._from_path(self, path) + except revision.RevisionError as err: + compat.raise_from_cause(util.CommandError(err.args[0])) + if branch_labels and not script.branch_labels: + raise util.CommandError( + "Version %s specified branch_labels %s, however the " + "migration file %s does not have them; have you upgraded " + "your script.py.mako to include the " + "'branch_labels' section?" + % (script.revision, branch_labels, script.path) + ) + + self.revision_map.add_revision(script) + return script + + def _rev_path(self, path, rev_id, message, create_date): + slug = "_".join(_slug_re.findall(message or "")).lower() + if len(slug) > self.truncate_slug_length: + slug = slug[: self.truncate_slug_length].rsplit("_", 1)[0] + "_" + filename = "%s.py" % ( + self.file_template + % { + "rev": rev_id, + "slug": slug, + "year": create_date.year, + "month": create_date.month, + "day": create_date.day, + "hour": create_date.hour, + "minute": create_date.minute, + "second": create_date.second, + } + ) + return os.path.join(path, filename) + + +class Script(revision.Revision): + + """Represent a single revision file in a ``versions/`` directory. + + The :class:`.Script` instance is returned by methods + such as :meth:`.ScriptDirectory.iterate_revisions`. 
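
A short sketch of calling generate_revision() directly instead of going through the alembic CLI; the ini path and message are placeholders, and rev_id() is the helper the docstring above refers to.

from alembic.config import Config
from alembic.script import ScriptDirectory
from alembic.util import rev_id

script = ScriptDirectory.from_config(Config("alembic.ini"))   # placeholder ini path

# Renders script.py.mako into a new file under versions/ and returns a Script.
new_script = script.generate_revision(rev_id(), "add users table")
print(new_script.revision, new_script.path)
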
+ + """ + + def __init__(self, module, rev_id, path): + self.module = module + self.path = path + super(Script, self).__init__( + rev_id, + module.down_revision, + branch_labels=util.to_tuple( + getattr(module, "branch_labels", None), default=() + ), + dependencies=util.to_tuple( + getattr(module, "depends_on", None), default=() + ), + ) + + module = None + """The Python module representing the actual script itself.""" + + path = None + """Filesystem path of the script.""" + + _db_current_indicator = None + """Utility variable which when set will cause string output to indicate + this is a "current" version in some database""" + + @property + def doc(self): + """Return the docstring given in the script.""" + + return re.split("\n\n", self.longdoc)[0] + + @property + def longdoc(self): + """Return the docstring given in the script.""" + + doc = self.module.__doc__ + if doc: + if hasattr(self.module, "_alembic_source_encoding"): + doc = doc.decode(self.module._alembic_source_encoding) + return doc.strip() + else: + return "" + + @property + def log_entry(self): + entry = "Rev: %s%s%s%s%s\n" % ( + self.revision, + " (head)" if self.is_head else "", + " (branchpoint)" if self.is_branch_point else "", + " (mergepoint)" if self.is_merge_point else "", + " (current)" if self._db_current_indicator else "", + ) + if self.is_merge_point: + entry += "Merges: %s\n" % (self._format_down_revision(),) + else: + entry += "Parent: %s\n" % (self._format_down_revision(),) + + if self.dependencies: + entry += "Also depends on: %s\n" % ( + util.format_as_comma(self.dependencies) + ) + + if self.is_branch_point: + entry += "Branches into: %s\n" % ( + util.format_as_comma(self.nextrev) + ) + + if self.branch_labels: + entry += "Branch names: %s\n" % ( + util.format_as_comma(self.branch_labels), + ) + + entry += "Path: %s\n" % (self.path,) + + entry += "\n%s\n" % ( + "\n".join(" %s" % para for para in self.longdoc.splitlines()) + ) + return entry + + def __str__(self): + return "%s -> %s%s%s%s, %s" % ( + self._format_down_revision(), + self.revision, + " (head)" if self.is_head else "", + " (branchpoint)" if self.is_branch_point else "", + " (mergepoint)" if self.is_merge_point else "", + self.doc, + ) + + def _head_only( + self, + include_branches=False, + include_doc=False, + include_parents=False, + tree_indicators=True, + head_indicators=True, + ): + text = self.revision + if include_parents: + if self.dependencies: + text = "%s (%s) -> %s" % ( + self._format_down_revision(), + util.format_as_comma(self.dependencies), + text, + ) + else: + text = "%s -> %s" % (self._format_down_revision(), text) + if include_branches and self.branch_labels: + text += " (%s)" % util.format_as_comma(self.branch_labels) + if head_indicators or tree_indicators: + text += "%s%s%s" % ( + " (head)" if self._is_real_head else "", + " (effective head)" + if self.is_head and not self._is_real_head + else "", + " (current)" if self._db_current_indicator else "", + ) + if tree_indicators: + text += "%s%s" % ( + " (branchpoint)" if self.is_branch_point else "", + " (mergepoint)" if self.is_merge_point else "", + ) + if include_doc: + text += ", %s" % self.doc + return text + + def cmd_format( + self, + verbose, + include_branches=False, + include_doc=False, + include_parents=False, + tree_indicators=True, + ): + if verbose: + return self.log_entry + else: + return self._head_only( + include_branches, include_doc, include_parents, tree_indicators + ) + + def _format_down_revision(self): + if not self.down_revision: + return "" + else: + 
return util.format_as_comma(self._versioned_down_revisions) + + @classmethod + def _from_path(cls, scriptdir, path): + dir_, filename = os.path.split(path) + return cls._from_filename(scriptdir, dir_, filename) + + @classmethod + def _list_py_dir(cls, scriptdir, path): + if scriptdir.sourceless: + # read files in version path, e.g. pyc or pyo files + # in the immediate path + paths = os.listdir(path) + + names = set(fname.split(".")[0] for fname in paths) + + # look for __pycache__ + if os.path.exists(os.path.join(path, "__pycache__")): + # add all files from __pycache__ whose filename is not + # already in the names we got from the version directory. + # add as relative paths including __pycache__ token + paths.extend( + os.path.join("__pycache__", pyc) + for pyc in os.listdir(os.path.join(path, "__pycache__")) + if pyc.split(".")[0] not in names + ) + return paths + else: + return os.listdir(path) + + @classmethod + def _from_filename(cls, scriptdir, dir_, filename): + if scriptdir.sourceless: + py_match = _sourceless_rev_file.match(filename) + else: + py_match = _only_source_rev_file.match(filename) + + if not py_match: + return None + + py_filename = py_match.group(1) + + if scriptdir.sourceless: + is_c = py_match.group(2) == "c" + is_o = py_match.group(2) == "o" + else: + is_c = is_o = False + + if is_o or is_c: + py_exists = os.path.exists(os.path.join(dir_, py_filename)) + pyc_exists = os.path.exists(os.path.join(dir_, py_filename + "c")) + + # prefer .py over .pyc because we'd like to get the + # source encoding; prefer .pyc over .pyo because we'd like to + # have the docstrings which a -OO file would not have + if py_exists or is_o and pyc_exists: + return None + + module = util.load_python_file(dir_, filename) + + if not hasattr(module, "revision"): + # attempt to get the revision id from the script name, + # this for legacy only + m = _legacy_rev.match(filename) + if not m: + raise util.CommandError( + "Could not determine revision id from filename %s. " + "Be sure the 'revision' variable is " + "declared inside the script (please see 'Upgrading " + "from Alembic 0.1 to 0.2' in the documentation)." + % filename + ) + else: + revision = m.group(1) + else: + revision = module.revision + return Script(module, revision, os.path.join(dir_, filename)) diff --git a/py3/lib/python3.6/site-packages/alembic/script/revision.py b/py3/lib/python3.6/site-packages/alembic/script/revision.py new file mode 100644 index 0000000..43c757e --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/script/revision.py @@ -0,0 +1,1053 @@ +import collections +import re + +from sqlalchemy import util as sqlautil + +from .. 
import util +from ..util import compat + +_relative_destination = re.compile(r"(?:(.+?)@)?(\w+)?((?:\+|-)\d+)") +_revision_illegal_chars = ["@", "-", "+"] + + +class RevisionError(Exception): + pass + + +class RangeNotAncestorError(RevisionError): + def __init__(self, lower, upper): + self.lower = lower + self.upper = upper + super(RangeNotAncestorError, self).__init__( + "Revision %s is not an ancestor of revision %s" + % (lower or "base", upper or "base") + ) + + +class MultipleHeads(RevisionError): + def __init__(self, heads, argument): + self.heads = heads + self.argument = argument + super(MultipleHeads, self).__init__( + "Multiple heads are present for given argument '%s'; " + "%s" % (argument, ", ".join(heads)) + ) + + +class ResolutionError(RevisionError): + def __init__(self, message, argument): + super(ResolutionError, self).__init__(message) + self.argument = argument + + +class RevisionMap(object): + """Maintains a map of :class:`.Revision` objects. + + :class:`.RevisionMap` is used by :class:`.ScriptDirectory` to maintain + and traverse the collection of :class:`.Script` objects, which are + themselves instances of :class:`.Revision`. + + """ + + def __init__(self, generator): + """Construct a new :class:`.RevisionMap`. + + :param generator: a zero-arg callable that will generate an iterable + of :class:`.Revision` instances to be used. These are typically + :class:`.Script` subclasses within regular Alembic use. + + """ + self._generator = generator + + @util.memoized_property + def heads(self): + """All "head" revisions as strings. + + This is normally a tuple of length one, + unless unmerged branches are present. + + :return: a tuple of string revision numbers. + + """ + self._revision_map + return self.heads + + @util.memoized_property + def bases(self): + """All "base" revisions as strings. + + These are revisions that have a ``down_revision`` of None, + or empty tuple. + + :return: a tuple of string revision numbers. + + """ + self._revision_map + return self.bases + + @util.memoized_property + def _real_heads(self): + """All "real" head revisions as strings. + + :return: a tuple of string revision numbers. + + """ + self._revision_map + return self._real_heads + + @util.memoized_property + def _real_bases(self): + """All "real" base revisions as strings. + + :return: a tuple of string revision numbers. + + """ + self._revision_map + return self._real_bases + + @util.memoized_property + def _revision_map(self): + """memoized attribute, initializes the revision map from the + initial collection. + + """ + map_ = {} + + heads = sqlautil.OrderedSet() + _real_heads = sqlautil.OrderedSet() + self.bases = () + self._real_bases = () + + has_branch_labels = set() + has_depends_on = set() + for revision in self._generator(): + + if revision.revision in map_: + util.warn( + "Revision %s is present more than once" % revision.revision + ) + map_[revision.revision] = revision + if revision.branch_labels: + has_branch_labels.add(revision) + if revision.dependencies: + has_depends_on.add(revision) + heads.add(revision.revision) + _real_heads.add(revision.revision) + if revision.is_base: + self.bases += (revision.revision,) + if revision._is_real_base: + self._real_bases += (revision.revision,) + + # add the branch_labels to the map_. We'll need these + # to resolve the dependencies. 
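
The RevisionMap constructor above takes a zero-argument callable yielding Revision objects; a self-contained sketch (revision ids are invented) of how heads and bases fall out of the down_revision links:

from alembic.script.revision import Revision, RevisionMap

rmap = RevisionMap(lambda: [
    Revision("a1", None),        # base: no down_revision
    Revision("b2", "a1"),
    Revision("c3", "b2"),        # head: nothing points down to it
])

print(rmap.bases)   # ('a1',)
print(rmap.heads)   # ('c3',)
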
+ for revision in has_branch_labels: + self._map_branch_labels(revision, map_) + + for revision in has_depends_on: + self._add_depends_on(revision, map_) + + for rev in map_.values(): + for downrev in rev._all_down_revisions: + if downrev not in map_: + util.warn( + "Revision %s referenced from %s is not present" + % (downrev, rev) + ) + down_revision = map_[downrev] + down_revision.add_nextrev(rev) + if downrev in rev._versioned_down_revisions: + heads.discard(downrev) + _real_heads.discard(downrev) + + map_[None] = map_[()] = None + self.heads = tuple(heads) + self._real_heads = tuple(_real_heads) + + for revision in has_branch_labels: + self._add_branches(revision, map_, map_branch_labels=False) + return map_ + + def _map_branch_labels(self, revision, map_): + if revision.branch_labels: + for branch_label in revision._orig_branch_labels: + if branch_label in map_: + raise RevisionError( + "Branch name '%s' in revision %s already " + "used by revision %s" + % ( + branch_label, + revision.revision, + map_[branch_label].revision, + ) + ) + map_[branch_label] = revision + + def _add_branches(self, revision, map_, map_branch_labels=True): + if map_branch_labels: + self._map_branch_labels(revision, map_) + + if revision.branch_labels: + revision.branch_labels.update(revision.branch_labels) + for node in self._get_descendant_nodes( + [revision], map_, include_dependencies=False + ): + node.branch_labels.update(revision.branch_labels) + + parent = node + while ( + parent + and not parent._is_real_branch_point + and not parent.is_merge_point + ): + + parent.branch_labels.update(revision.branch_labels) + if parent.down_revision: + parent = map_[parent.down_revision] + else: + break + + def _add_depends_on(self, revision, map_): + if revision.dependencies: + deps = [map_[dep] for dep in util.to_tuple(revision.dependencies)] + revision._resolved_dependencies = tuple([d.revision for d in deps]) + + def add_revision(self, revision, _replace=False): + """add a single revision to an existing map. + + This method is for single-revision use cases, it's not + appropriate for fully populating an entire revision map. + + """ + map_ = self._revision_map + if not _replace and revision.revision in map_: + util.warn( + "Revision %s is present more than once" % revision.revision + ) + elif _replace and revision.revision not in map_: + raise Exception("revision %s not in map" % revision.revision) + + map_[revision.revision] = revision + self._add_branches(revision, map_) + self._add_depends_on(revision, map_) + + if revision.is_base: + self.bases += (revision.revision,) + if revision._is_real_base: + self._real_bases += (revision.revision,) + for downrev in revision._all_down_revisions: + if downrev not in map_: + util.warn( + "Revision %s referenced from %s is not present" + % (downrev, revision) + ) + map_[downrev].add_nextrev(revision) + if revision._is_real_head: + self._real_heads = tuple( + head + for head in self._real_heads + if head + not in set(revision._all_down_revisions).union( + [revision.revision] + ) + ) + (revision.revision,) + if revision.is_head: + self.heads = tuple( + head + for head in self.heads + if head + not in set(revision._versioned_down_revisions).union( + [revision.revision] + ) + ) + (revision.revision,) + + def get_current_head(self, branch_label=None): + """Return the current head revision. + + If the script directory has multiple heads + due to branching, an error is raised; + :meth:`.ScriptDirectory.get_heads` should be + preferred. 
+ + :param branch_label: optional branch name which will limit the + heads considered to those which include that branch_label. + + :return: a string revision number. + + .. seealso:: + + :meth:`.ScriptDirectory.get_heads` + + """ + current_heads = self.heads + if branch_label: + current_heads = self.filter_for_lineage( + current_heads, branch_label + ) + if len(current_heads) > 1: + raise MultipleHeads( + current_heads, + "%s@head" % branch_label if branch_label else "head", + ) + + if current_heads: + return current_heads[0] + else: + return None + + def _get_base_revisions(self, identifier): + return self.filter_for_lineage(self.bases, identifier) + + def get_revisions(self, id_): + """Return the :class:`.Revision` instances with the given rev id + or identifiers. + + May be given a single identifier, a sequence of identifiers, or the + special symbols "head" or "base". The result is a tuple of one + or more identifiers, or an empty tuple in the case of "base". + + In the cases where 'head', 'heads' is requested and the + revision map is empty, returns an empty tuple. + + Supports partial identifiers, where the given identifier + is matched against all identifiers that start with the given + characters; if there is exactly one match, that determines the + full revision. + + """ + + if isinstance(id_, (list, tuple, set, frozenset)): + return sum([self.get_revisions(id_elem) for id_elem in id_], ()) + else: + resolved_id, branch_label = self._resolve_revision_number(id_) + return tuple( + self._revision_for_ident(rev_id, branch_label) + for rev_id in resolved_id + ) + + def get_revision(self, id_): + """Return the :class:`.Revision` instance with the given rev id. + + If a symbolic name such as "head" or "base" is given, resolves + the identifier into the current head or base revision. If the symbolic + name refers to multiples, :class:`.MultipleHeads` is raised. + + Supports partial identifiers, where the given identifier + is matched against all identifiers that start with the given + characters; if there is exactly one match, that determines the + full revision. + + """ + + resolved_id, branch_label = self._resolve_revision_number(id_) + if len(resolved_id) > 1: + raise MultipleHeads(resolved_id, id_) + elif resolved_id: + resolved_id = resolved_id[0] + + return self._revision_for_ident(resolved_id, branch_label) + + def _resolve_branch(self, branch_label): + try: + branch_rev = self._revision_map[branch_label] + except KeyError: + try: + nonbranch_rev = self._revision_for_ident(branch_label) + except ResolutionError: + raise ResolutionError( + "No such branch: '%s'" % branch_label, branch_label + ) + else: + return nonbranch_rev + else: + return branch_rev + + def _revision_for_ident(self, resolved_id, check_branch=None): + if check_branch: + branch_rev = self._resolve_branch(check_branch) + else: + branch_rev = None + + try: + revision = self._revision_map[resolved_id] + except KeyError: + # break out to avoid misleading py3k stack traces + revision = False + if revision is False: + # do a partial lookup + revs = [ + x + for x in self._revision_map + if x and x.startswith(resolved_id) + ] + if branch_rev: + revs = self.filter_for_lineage(revs, check_branch) + if not revs: + raise ResolutionError( + "No such revision or branch '%s'" % resolved_id, + resolved_id, + ) + elif len(revs) > 1: + raise ResolutionError( + "Multiple revisions start " + "with '%s': %s..." 
+ % (resolved_id, ", ".join("'%s'" % r for r in revs[0:3])), + resolved_id, + ) + else: + revision = self._revision_map[revs[0]] + + if check_branch and revision is not None: + if not self._shares_lineage( + revision.revision, branch_rev.revision + ): + raise ResolutionError( + "Revision %s is not a member of branch '%s'" + % (revision.revision, check_branch), + resolved_id, + ) + return revision + + def _filter_into_branch_heads(self, targets): + targets = set(targets) + + for rev in list(targets): + if targets.intersection( + self._get_descendant_nodes([rev], include_dependencies=False) + ).difference([rev]): + targets.discard(rev) + return targets + + def filter_for_lineage( + self, targets, check_against, include_dependencies=False + ): + id_, branch_label = self._resolve_revision_number(check_against) + + shares = [] + if branch_label: + shares.append(branch_label) + if id_: + shares.extend(id_) + + return [ + tg + for tg in targets + if self._shares_lineage( + tg, shares, include_dependencies=include_dependencies + ) + ] + + def _shares_lineage( + self, target, test_against_revs, include_dependencies=False + ): + if not test_against_revs: + return True + if not isinstance(target, Revision): + target = self._revision_for_ident(target) + + test_against_revs = [ + self._revision_for_ident(test_against_rev) + if not isinstance(test_against_rev, Revision) + else test_against_rev + for test_against_rev in util.to_tuple( + test_against_revs, default=() + ) + ] + + return bool( + set( + self._get_descendant_nodes( + [target], include_dependencies=include_dependencies + ) + ) + .union( + self._get_ancestor_nodes( + [target], include_dependencies=include_dependencies + ) + ) + .intersection(test_against_revs) + ) + + def _resolve_revision_number(self, id_): + if isinstance(id_, compat.string_types) and "@" in id_: + branch_label, id_ = id_.split("@", 1) + + elif id_ is not None and ( + ( + isinstance(id_, tuple) + and id_ + and not isinstance(id_[0], compat.string_types) + ) + or not isinstance(id_, compat.string_types + (tuple, )) + ): + raise RevisionError( + "revision identifier %r is not a string; ensure database " + "driver settings are correct" % (id_,) + ) + + else: + branch_label = None + + # ensure map is loaded + self._revision_map + if id_ == "heads": + if branch_label: + return ( + self.filter_for_lineage(self.heads, branch_label), + branch_label, + ) + else: + return self._real_heads, branch_label + elif id_ == "head": + current_head = self.get_current_head(branch_label) + if current_head: + return (current_head,), branch_label + else: + return (), branch_label + elif id_ == "base" or id_ is None: + return (), branch_label + else: + return util.to_tuple(id_, default=None), branch_label + + def _relative_iterate( + self, + destination, + source, + is_upwards, + implicit_base, + inclusive, + assert_relative_length, + ): + if isinstance(destination, compat.string_types): + match = _relative_destination.match(destination) + if not match: + return None + else: + return None + + relative = int(match.group(3)) + symbol = match.group(2) + branch_label = match.group(1) + + reldelta = 1 if inclusive and not symbol else 0 + + if is_upwards: + if branch_label: + from_ = "%s@head" % branch_label + elif symbol: + if symbol.startswith("head"): + from_ = symbol + else: + from_ = "%s@head" % symbol + else: + from_ = "head" + to_ = source + else: + if branch_label: + to_ = "%s@base" % branch_label + elif symbol: + to_ = "%s@base" % symbol + else: + to_ = "base" + from_ = source + + revs = list( + 
self._iterate_revisions( + from_, to_, inclusive=inclusive, implicit_base=implicit_base + ) + ) + + if symbol: + if branch_label: + symbol_rev = self.get_revision( + "%s@%s" % (branch_label, symbol) + ) + else: + symbol_rev = self.get_revision(symbol) + if symbol.startswith("head"): + index = 0 + elif symbol == "base": + index = len(revs) - 1 + else: + range_ = compat.range(len(revs) - 1, 0, -1) + for index in range_: + if symbol_rev.revision == revs[index].revision: + break + else: + index = 0 + else: + index = 0 + if is_upwards: + revs = revs[index - relative - reldelta :] + if ( + not index + and assert_relative_length + and len(revs) < abs(relative - reldelta) + ): + raise RevisionError( + "Relative revision %s didn't " + "produce %d migrations" % (destination, abs(relative)) + ) + else: + revs = revs[0 : index - relative + reldelta] + if ( + not index + and assert_relative_length + and len(revs) != abs(relative) + reldelta + ): + raise RevisionError( + "Relative revision %s didn't " + "produce %d migrations" % (destination, abs(relative)) + ) + + return iter(revs) + + def iterate_revisions( + self, + upper, + lower, + implicit_base=False, + inclusive=False, + assert_relative_length=True, + select_for_downgrade=False, + ): + """Iterate through script revisions, starting at the given + upper revision identifier and ending at the lower. + + The traversal uses strictly the `down_revision` + marker inside each migration script, so + it is a requirement that upper >= lower, + else you'll get nothing back. + + The iterator yields :class:`.Revision` objects. + + """ + + relative_upper = self._relative_iterate( + upper, + lower, + True, + implicit_base, + inclusive, + assert_relative_length, + ) + if relative_upper: + return relative_upper + + relative_lower = self._relative_iterate( + lower, + upper, + False, + implicit_base, + inclusive, + assert_relative_length, + ) + if relative_lower: + return relative_lower + + return self._iterate_revisions( + upper, + lower, + inclusive=inclusive, + implicit_base=implicit_base, + select_for_downgrade=select_for_downgrade, + ) + + def _get_descendant_nodes( + self, + targets, + map_=None, + check=False, + omit_immediate_dependencies=False, + include_dependencies=True, + ): + + if omit_immediate_dependencies: + + def fn(rev): + if rev not in targets: + return rev._all_nextrev + else: + return rev.nextrev + + elif include_dependencies: + + def fn(rev): + return rev._all_nextrev + + else: + + def fn(rev): + return rev.nextrev + + return self._iterate_related_revisions( + fn, targets, map_=map_, check=check + ) + + def _get_ancestor_nodes( + self, targets, map_=None, check=False, include_dependencies=True + ): + + if include_dependencies: + + def fn(rev): + return rev._all_down_revisions + + else: + + def fn(rev): + return rev._versioned_down_revisions + + return self._iterate_related_revisions( + fn, targets, map_=map_, check=check + ) + + def _iterate_related_revisions(self, fn, targets, map_, check=False): + if map_ is None: + map_ = self._revision_map + + seen = set() + todo = collections.deque() + for target in targets: + + todo.append(target) + if check: + per_target = set() + + while todo: + rev = todo.pop() + if check: + per_target.add(rev) + + if rev in seen: + continue + seen.add(rev) + todo.extend(map_[rev_id] for rev_id in fn(rev)) + yield rev + if check: + overlaps = per_target.intersection(targets).difference( + [target] + ) + if overlaps: + raise RevisionError( + "Requested revision %s overlaps with " + "other requested revisions %s" + % ( + 
target.revision, + ", ".join(r.revision for r in overlaps), + ) + ) + + def _iterate_revisions( + self, + upper, + lower, + inclusive=True, + implicit_base=False, + select_for_downgrade=False, + ): + """iterate revisions from upper to lower. + + The traversal is depth-first within branches, and breadth-first + across branches as a whole. + + """ + + requested_lowers = self.get_revisions(lower) + + # some complexity to accommodate an iteration where some + # branches are starting from nothing, and others are starting + # from a given point. Additionally, if the bottom branch + # is specified using a branch identifier, then we limit operations + # to just that branch. + + limit_to_lower_branch = isinstance( + lower, compat.string_types + ) and lower.endswith("@base") + + uppers = util.dedupe_tuple(self.get_revisions(upper)) + + if not uppers and not requested_lowers: + return + + upper_ancestors = set(self._get_ancestor_nodes(uppers, check=True)) + + if limit_to_lower_branch: + lowers = self.get_revisions(self._get_base_revisions(lower)) + elif implicit_base and requested_lowers: + lower_ancestors = set(self._get_ancestor_nodes(requested_lowers)) + lower_descendants = set( + self._get_descendant_nodes(requested_lowers) + ) + base_lowers = set() + candidate_lowers = upper_ancestors.difference( + lower_ancestors + ).difference(lower_descendants) + for rev in candidate_lowers: + for downrev in rev._all_down_revisions: + if self._revision_map[downrev] in candidate_lowers: + break + else: + base_lowers.add(rev) + lowers = base_lowers.union(requested_lowers) + elif implicit_base: + base_lowers = set(self.get_revisions(self._real_bases)) + lowers = base_lowers.union(requested_lowers) + elif not requested_lowers: + lowers = set(self.get_revisions(self._real_bases)) + else: + lowers = requested_lowers + + # represents all nodes we will produce + total_space = set( + rev.revision for rev in upper_ancestors + ).intersection( + rev.revision + for rev in self._get_descendant_nodes( + lowers, + check=True, + omit_immediate_dependencies=( + select_for_downgrade and requested_lowers + ), + ) + ) + + if not total_space: + # no nodes. determine if this is an invalid range + # or not. 
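
Continuing the invented three-revision map from the earlier RevisionMap sketch: iterate_revisions() walks from the upper identifier down to the lower one, upper-first and excluding the lower bound unless inclusive=True, and get_revision() also resolves unambiguous partial identifiers.

from alembic.script.revision import Revision, RevisionMap

rmap = RevisionMap(lambda: [
    Revision("a1", None),
    Revision("b2", "a1"),
    Revision("c3", "b2"),
])

print([r.revision for r in rmap.iterate_revisions("c3", "a1")])
# ['c3', 'b2']        -- lower bound excluded by default

print([r.revision for r in rmap.iterate_revisions("c3", "a1", inclusive=True)])
# ['c3', 'b2', 'a1']

print(rmap.get_revision("c").revision)   # 'c3': a unique partial id resolves
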
+ start_from = set(requested_lowers) + start_from.update( + self._get_ancestor_nodes( + list(start_from), include_dependencies=True + ) + ) + + # determine all the current branch points represented + # by requested_lowers + start_from = self._filter_into_branch_heads(start_from) + + # if the requested start is one of those branch points, + # then just return empty set + if start_from.intersection(upper_ancestors): + return + else: + # otherwise, they requested nodes out of + # order + raise RangeNotAncestorError(lower, upper) + + # organize branch points to be consumed separately from + # member nodes + branch_todo = set( + rev + for rev in (self._revision_map[rev] for rev in total_space) + if rev._is_real_branch_point + and len(total_space.intersection(rev._all_nextrev)) > 1 + ) + + # it's not possible for any "uppers" to be in branch_todo, + # because the ._all_nextrev of those nodes is not in total_space + # assert not branch_todo.intersection(uppers) + + todo = collections.deque( + r for r in uppers if r.revision in total_space + ) + + # iterate for total_space being emptied out + total_space_modified = True + while total_space: + + if not total_space_modified: + raise RevisionError( + "Dependency resolution failed; iteration can't proceed" + ) + total_space_modified = False + # when everything non-branch pending is consumed, + # add to the todo any branch nodes that have no + # descendants left in the queue + if not todo: + todo.extendleft( + sorted( + ( + rev + for rev in branch_todo + if not rev._all_nextrev.intersection(total_space) + ), + # favor "revisioned" branch points before + # dependent ones + key=lambda rev: 0 if rev.is_branch_point else 1, + ) + ) + branch_todo.difference_update(todo) + # iterate nodes that are in the immediate todo + while todo: + rev = todo.popleft() + total_space.remove(rev.revision) + total_space_modified = True + + # do depth first for elements within branches, + # don't consume any actual branch nodes + todo.extendleft( + [ + self._revision_map[downrev] + for downrev in reversed(rev._all_down_revisions) + if self._revision_map[downrev] not in branch_todo + and downrev in total_space + ] + ) + + if not inclusive and rev in requested_lowers: + continue + yield rev + + assert not branch_todo + + +class Revision(object): + """Base class for revisioned objects. + + The :class:`.Revision` class is the base of the more public-facing + :class:`.Script` object, which represents a migration script. + The mechanics of revision management and traversal are encapsulated + within :class:`.Revision`, while :class:`.Script` applies this logic + to Python files in a version directory. + + """ + + nextrev = frozenset() + """following revisions, based on down_revision only.""" + + _all_nextrev = frozenset() + + revision = None + """The string revision number.""" + + down_revision = None + """The ``down_revision`` identifier(s) within the migration script. + + Note that the total set of "down" revisions is + down_revision + dependencies. + + """ + + dependencies = None + """Additional revisions which this revision is dependent on. + + From a migration standpoint, these dependencies are added to the + down_revision to form the full iteration. However, the separation + of down_revision from "dependencies" is to assist in navigating + a history that contains many branches, typically a multi-root scenario. 
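
A small sketch of the down_revision/dependencies distinction described above (revision ids are invented): dependencies join the traversal as extra "down" revisions without becoming the script's parent.

from alembic.script.revision import Revision, RevisionMap

rmap = RevisionMap(lambda: [
    Revision("a1", None),
    Revision("x9", None),                      # a second root
    Revision("b2", "a1", dependencies="x9"),   # depends on x9, descends from a1
])

b2 = rmap.get_revision("b2")
print(b2._versioned_down_revisions)   # ('a1',)        -- down_revision only
print(b2._all_down_revisions)         # ('a1', 'x9')   -- down_revision + dependencies
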
+ + """ + + branch_labels = None + """Optional string/tuple of symbolic names to apply to this + revision's branch""" + + @classmethod + def verify_rev_id(cls, revision): + illegal_chars = set(revision).intersection(_revision_illegal_chars) + if illegal_chars: + raise RevisionError( + "Character(s) '%s' not allowed in revision identifier '%s'" + % (", ".join(sorted(illegal_chars)), revision) + ) + + def __init__( + self, revision, down_revision, dependencies=None, branch_labels=None + ): + self.verify_rev_id(revision) + self.revision = revision + self.down_revision = tuple_rev_as_scalar(down_revision) + self.dependencies = tuple_rev_as_scalar(dependencies) + self._resolved_dependencies = () + self._orig_branch_labels = util.to_tuple(branch_labels, default=()) + self.branch_labels = set(self._orig_branch_labels) + + def __repr__(self): + args = [repr(self.revision), repr(self.down_revision)] + if self.dependencies: + args.append("dependencies=%r" % (self.dependencies,)) + if self.branch_labels: + args.append("branch_labels=%r" % (self.branch_labels,)) + return "%s(%s)" % (self.__class__.__name__, ", ".join(args)) + + def add_nextrev(self, revision): + self._all_nextrev = self._all_nextrev.union([revision.revision]) + if self.revision in revision._versioned_down_revisions: + self.nextrev = self.nextrev.union([revision.revision]) + + @property + def _all_down_revisions(self): + return ( + util.to_tuple(self.down_revision, default=()) + + self._resolved_dependencies + ) + + @property + def _versioned_down_revisions(self): + return util.to_tuple(self.down_revision, default=()) + + @property + def is_head(self): + """Return True if this :class:`.Revision` is a 'head' revision. + + This is determined based on whether any other :class:`.Script` + within the :class:`.ScriptDirectory` refers to this + :class:`.Script`. Multiple heads can be present. + + """ + return not bool(self.nextrev) + + @property + def _is_real_head(self): + return not bool(self._all_nextrev) + + @property + def is_base(self): + """Return True if this :class:`.Revision` is a 'base' revision.""" + + return self.down_revision is None + + @property + def _is_real_base(self): + """Return True if this :class:`.Revision` is a "real" base revision, + e.g. that it has no dependencies either.""" + + # we use self.dependencies here because this is called up + # in initialization where _real_dependencies isn't set up + # yet + return self.down_revision is None and self.dependencies is None + + @property + def is_branch_point(self): + """Return True if this :class:`.Script` is a branch point. + + A branchpoint is defined as a :class:`.Script` which is referred + to by more than one succeeding :class:`.Script`, that is more + than one :class:`.Script` has a `down_revision` identifier pointing + here. + + """ + return len(self.nextrev) > 1 + + @property + def _is_real_branch_point(self): + """Return True if this :class:`.Script` is a 'real' branch point, + taking into account dependencies as well. 
+ + """ + return len(self._all_nextrev) > 1 + + @property + def is_merge_point(self): + """Return True if this :class:`.Script` is a merge point.""" + + return len(self._versioned_down_revisions) > 1 + + +def tuple_rev_as_scalar(rev): + if not rev: + return None + elif len(rev) == 1: + return rev[0] + else: + return rev diff --git a/py3/lib/python3.6/site-packages/alembic/templates/generic/README b/py3/lib/python3.6/site-packages/alembic/templates/generic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/templates/generic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/alembic/templates/generic/alembic.ini.mako b/py3/lib/python3.6/site-packages/alembic/templates/generic/alembic.ini.mako new file mode 100644 index 0000000..1b7d6ea --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/templates/generic/alembic.ini.mako @@ -0,0 +1,74 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = ${script_location} + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# timezone to use when rendering the date +# within the migration file as well as the filename. +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to ${script_location}/versions. When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat ${script_location}/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/py3/lib/python3.6/site-packages/alembic/templates/generic/env.py b/py3/lib/python3.6/site-packages/alembic/templates/generic/env.py new file mode 100644 index 0000000..15cb472 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/templates/generic/env.py @@ -0,0 +1,75 @@ + +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
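
The generic alembic.ini.mako above maps directly onto alembic.config.Config options; as a sketch, the same configuration can be loaded and run from Python rather than the CLI (the ini path and URL are placeholders).

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")   # placeholder path to an ini like the template above
cfg.set_main_option("sqlalchemy.url", "sqlite:///example.db")   # placeholder override

command.upgrade(cfg, "head")   # online mode: connects and runs the migrations
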
+fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=target_metadata, literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/py3/lib/python3.6/site-packages/alembic/templates/generic/script.py.mako b/py3/lib/python3.6/site-packages/alembic/templates/generic/script.py.mako new file mode 100644 index 0000000..2c01563 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/templates/generic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/py3/lib/python3.6/site-packages/alembic/templates/multidb/README b/py3/lib/python3.6/site-packages/alembic/templates/multidb/README new file mode 100644 index 0000000..5db219f --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/templates/multidb/README @@ -0,0 +1 @@ +Rudimentary multi-database configuration. \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/alembic/templates/multidb/alembic.ini.mako b/py3/lib/python3.6/site-packages/alembic/templates/multidb/alembic.ini.mako new file mode 100644 index 0000000..79fcb79 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/templates/multidb/alembic.ini.mako @@ -0,0 +1,80 @@ +# a multi-database configuration. + +[alembic] +# path to migration scripts +script_location = ${script_location} + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# timezone to use when rendering the date +# within the migration file as well as the filename. 
+# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to ${script_location}/versions. When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat ${script_location}/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +databases = engine1, engine2 + +[engine1] +sqlalchemy.url = driver://user:pass@localhost/dbname + +[engine2] +sqlalchemy.url = driver://user:pass@localhost/dbname2 + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/py3/lib/python3.6/site-packages/alembic/templates/multidb/env.py b/py3/lib/python3.6/site-packages/alembic/templates/multidb/env.py new file mode 100644 index 0000000..607efaa --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/templates/multidb/env.py @@ -0,0 +1,139 @@ + +import logging +from logging.config import fileConfig +import re + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +USE_TWOPHASE = False + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) +logger = logging.getLogger("alembic.env") + +# gather section names referring to different +# databases. These are named "engine1", "engine2" +# in the sample .ini file. +db_names = config.get_main_option("databases") + +# add your model's MetaData objects here +# for 'autogenerate' support. These must be set +# up to hold just those tables targeting a +# particular database. table.tometadata() may be +# helpful here in case a "copy" of +# a MetaData is needed. +# from myapp import mymodel +# target_metadata = { +# 'engine1':mymodel.metadata1, +# 'engine2':mymodel.metadata2 +# } +target_metadata = {} + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + # for the --sql use case, run migrations for each URL into + # individual files. 
+ + engines = {} + for name in re.split(r",\s*", db_names): + engines[name] = rec = {} + rec["url"] = context.config.get_section_option(name, "sqlalchemy.url") + + for name, rec in engines.items(): + logger.info("Migrating database %s" % name) + file_ = "%s.sql" % name + logger.info("Writing output to %s" % file_) + with open(file_, "w") as buffer: + context.configure( + url=rec["url"], + output_buffer=buffer, + target_metadata=target_metadata.get(name), + literal_binds=True, + ) + with context.begin_transaction(): + context.run_migrations(engine_name=name) + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + + # for the direct-to-DB use case, start a transaction on all + # engines, then run all migrations, then commit all transactions. + + engines = {} + for name in re.split(r",\s*", db_names): + engines[name] = rec = {} + rec["engine"] = engine_from_config( + context.config.get_section(name), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + for name, rec in engines.items(): + engine = rec["engine"] + rec["connection"] = conn = engine.connect() + + if USE_TWOPHASE: + rec["transaction"] = conn.begin_twophase() + else: + rec["transaction"] = conn.begin() + + try: + for name, rec in engines.items(): + logger.info("Migrating database %s" % name) + context.configure( + connection=rec["connection"], + upgrade_token="%s_upgrades" % name, + downgrade_token="%s_downgrades" % name, + target_metadata=target_metadata.get(name), + ) + context.run_migrations(engine_name=name) + + if USE_TWOPHASE: + for rec in engines.values(): + rec["transaction"].prepare() + + for rec in engines.values(): + rec["transaction"].commit() + except: + for rec in engines.values(): + rec["transaction"].rollback() + raise + finally: + for rec in engines.values(): + rec["connection"].close() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/py3/lib/python3.6/site-packages/alembic/templates/multidb/script.py.mako b/py3/lib/python3.6/site-packages/alembic/templates/multidb/script.py.mako new file mode 100644 index 0000000..c3970a5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/templates/multidb/script.py.mako @@ -0,0 +1,45 @@ +<%! +import re + +%>"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(engine_name): + globals()["upgrade_%s" % engine_name]() + + +def downgrade(engine_name): + globals()["downgrade_%s" % engine_name]() + +<% + db_names = config.get_main_option("databases") +%> + +## generate an "upgrade_() / downgrade_()" function +## for each database name in the ini file. 
+ +% for db_name in re.split(r',\s*', db_names): + +def upgrade_${db_name}(): + ${context.get("%s_upgrades" % db_name, "pass")} + + +def downgrade_${db_name}(): + ${context.get("%s_downgrades" % db_name, "pass")} + +% endfor diff --git a/py3/lib/python3.6/site-packages/alembic/templates/pylons/README b/py3/lib/python3.6/site-packages/alembic/templates/pylons/README new file mode 100644 index 0000000..ed3c28e --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/templates/pylons/README @@ -0,0 +1 @@ +Configuration that reads from a Pylons project environment. \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/alembic/templates/pylons/alembic.ini.mako b/py3/lib/python3.6/site-packages/alembic/templates/pylons/alembic.ini.mako new file mode 100644 index 0000000..6f6511b --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/templates/pylons/alembic.ini.mako @@ -0,0 +1,40 @@ +# a Pylons configuration. + +[alembic] +# path to migration scripts +script_location = ${script_location} + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# timezone to use when rendering the date +# within the migration file as well as the filename. +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to ${script_location}/versions. When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat ${script_location}/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +pylons_config_file = ./development.ini + +# that's it ! \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/alembic/templates/pylons/env.py b/py3/lib/python3.6/site-packages/alembic/templates/pylons/env.py new file mode 100644 index 0000000..f8abf44 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/templates/pylons/env.py @@ -0,0 +1,84 @@ +"""Pylons bootstrap environment. + +Place 'pylons_config_file' into alembic.ini, and the application will +be loaded from there. + +""" +from logging.config import fileConfig + +from paste.deploy import loadapp + +from alembic import context + + +try: + # if pylons app already in, don't create a new app + from pylons import config as pylons_config + + pylons_config["__file__"] +except: + config = context.config + # can use config['__file__'] here, i.e. the Pylons + # ini file, instead of alembic.ini + config_file = config.get_main_option("pylons_config_file") + fileConfig(config_file) + wsgi_app = loadapp("config:%s" % config_file, relative_to=".") + + +# customize this section for non-standard engine configurations. +meta = __import__( + "%s.model.meta" % wsgi_app.config["pylons.package"] +).model.meta + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. 
+ + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + context.configure( + url=meta.engine.url, + target_metadata=target_metadata, + literal_binds=True, + ) + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + # specify here how the engine is acquired + # engine = meta.engine + raise NotImplementedError("Please specify engine connectivity here") + + with engine.connect() as connection: # noqa + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/py3/lib/python3.6/site-packages/alembic/templates/pylons/script.py.mako b/py3/lib/python3.6/site-packages/alembic/templates/pylons/script.py.mako new file mode 100644 index 0000000..2c01563 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/templates/pylons/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/py3/lib/python3.6/site-packages/alembic/testing/__init__.py b/py3/lib/python3.6/site-packages/alembic/testing/__init__.py new file mode 100644 index 0000000..765dd7b --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/__init__.py @@ -0,0 +1,11 @@ +from alembic import util # noqa +from .assertions import assert_raises # noqa +from .assertions import assert_raises_message # noqa +from .assertions import eq_ # noqa +from .assertions import eq_ignore_whitespace # noqa +from .assertions import is_ # noqa +from .assertions import is_not_ # noqa +from .assertions import ne_ # noqa +from .config import requirements as requires # noqa +from .fixtures import TestBase # noqa +from .util import provide_metadata # noqa diff --git a/py3/lib/python3.6/site-packages/alembic/testing/assertions.py b/py3/lib/python3.6/site-packages/alembic/testing/assertions.py new file mode 100644 index 0000000..196e791 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/assertions.py @@ -0,0 +1,222 @@ +from __future__ import absolute_import + +import contextlib +import re +import warnings + +from sqlalchemy import exc as sa_exc +from sqlalchemy.engine import default +from sqlalchemy.util import decorator + +from . import config +from . import mock +from .exclusions import db_spec +from .. 
import util +from ..util.compat import py3k +from ..util.compat import text_type + + +if not util.sqla_094: + + def eq_(a, b, msg=None): + """Assert a == b, with repr messaging on failure.""" + assert a == b, msg or "%r != %r" % (a, b) + + def ne_(a, b, msg=None): + """Assert a != b, with repr messaging on failure.""" + assert a != b, msg or "%r == %r" % (a, b) + + def is_(a, b, msg=None): + """Assert a is b, with repr messaging on failure.""" + assert a is b, msg or "%r is not %r" % (a, b) + + def is_not_(a, b, msg=None): + """Assert a is not b, with repr messaging on failure.""" + assert a is not b, msg or "%r is %r" % (a, b) + + def assert_raises(except_cls, callable_, *args, **kw): + try: + callable_(*args, **kw) + success = False + except except_cls: + success = True + + # assert outside the block so it works for AssertionError too ! + assert success, "Callable did not raise an exception" + + def assert_raises_message(except_cls, msg, callable_, *args, **kwargs): + try: + callable_(*args, **kwargs) + assert False, "Callable did not raise an exception" + except except_cls as e: + assert re.search(msg, text_type(e), re.UNICODE), "%r !~ %s" % ( + msg, + e, + ) + print(text_type(e).encode("utf-8")) + + +else: + from sqlalchemy.testing.assertions import assert_raises # noqa + from sqlalchemy.testing.assertions import assert_raises_message # noqa + from sqlalchemy.testing.assertions import eq_ # noqa + from sqlalchemy.testing.assertions import is_ # noqa + from sqlalchemy.testing.assertions import is_not_ # noqa + from sqlalchemy.testing.assertions import ne_ # noqa + + +def eq_ignore_whitespace(a, b, msg=None): + a = re.sub(r"^\s+?|\n", "", a) + a = re.sub(r" {2,}", " ", a) + b = re.sub(r"^\s+?|\n", "", b) + b = re.sub(r" {2,}", " ", b) + + # convert for unicode string rendering, + # using special escape character "!U" + if py3k: + b = re.sub(r"!U", "", b) + else: + b = re.sub(r"!U", "u", b) + + assert a == b, msg or "%r != %r" % (a, b) + + +def assert_compiled(element, assert_string, dialect=None): + dialect = _get_dialect(dialect) + eq_( + text_type(element.compile(dialect=dialect)) + .replace("\n", "") + .replace("\t", ""), + assert_string.replace("\n", "").replace("\t", ""), + ) + + +_dialect_mods = {} + + +def _get_dialect(name): + if name is None or name == "default": + return default.DefaultDialect() + else: + try: + dialect_mod = _dialect_mods[name] + except KeyError: + dialect_mod = getattr( + __import__("sqlalchemy.dialects.%s" % name).dialects, name + ) + _dialect_mods[name] = dialect_mod + d = dialect_mod.dialect() + if name == "postgresql": + d.implicit_returning = True + elif name == "mssql": + d.legacy_schema_aliasing = False + return d + + +def expect_warnings(*messages, **kw): + """Context manager which expects one or more warnings. + + With no arguments, squelches all SAWarnings emitted via + sqlalchemy.util.warn and sqlalchemy.util.warn_limited. Otherwise + pass string expressions that will match selected warnings via regex; + all non-matching warnings are sent through. + + The expect version **asserts** that the warnings were in fact seen. + + Note that the test suite sets SAWarning warnings to raise exceptions. + + """ + return _expect_warnings(sa_exc.SAWarning, messages, **kw) + + +@contextlib.contextmanager +def expect_warnings_on(db, *messages, **kw): + """Context manager which expects one or more warnings on specific + dialects. + + The expect version **asserts** that the warnings were in fact seen. 
+ + """ + spec = db_spec(db) + + if isinstance(db, util.string_types) and not spec(config._current): + yield + else: + with expect_warnings(*messages, **kw): + yield + + +def emits_warning(*messages): + """Decorator form of expect_warnings(). + + Note that emits_warning does **not** assert that the warnings + were in fact seen. + + """ + + @decorator + def decorate(fn, *args, **kw): + with expect_warnings(assert_=False, *messages): + return fn(*args, **kw) + + return decorate + + +def emits_warning_on(db, *messages): + """Mark a test as emitting a warning on a specific dialect. + + With no arguments, squelches all SAWarning failures. Or pass one or more + strings; these will be matched to the root of the warning description by + warnings.filterwarnings(). + + Note that emits_warning_on does **not** assert that the warnings + were in fact seen. + + """ + + @decorator + def decorate(fn, *args, **kw): + with expect_warnings_on(db, *messages): + return fn(*args, **kw) + + return decorate + + +@contextlib.contextmanager +def _expect_warnings(exc_cls, messages, regex=True, assert_=True): + + if regex: + filters = [re.compile(msg, re.I) for msg in messages] + else: + filters = messages + + seen = set(filters) + + real_warn = warnings.warn + + def our_warn(msg, exception=None, *arg, **kw): + if exception and not issubclass(exception, exc_cls): + return real_warn(msg, exception, *arg, **kw) + + if not filters: + return + + for filter_ in filters: + if (regex and filter_.match(msg)) or ( + not regex and filter_ == msg + ): + seen.discard(filter_) + break + else: + if exception is None: + real_warn(msg, *arg, **kw) + else: + real_warn(msg, exception, *arg, **kw) + + with mock.patch("warnings.warn", our_warn): + yield + + if assert_: + assert not seen, "Warnings were not seen: %s" % ", ".join( + "%r" % (s.pattern if regex else s) for s in seen + ) diff --git a/py3/lib/python3.6/site-packages/alembic/testing/compat.py b/py3/lib/python3.6/site-packages/alembic/testing/compat.py new file mode 100644 index 0000000..9fbd50f --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/compat.py @@ -0,0 +1,12 @@ +def get_url_driver_name(url): + if "+" not in url.drivername: + return url.get_dialect().driver + else: + return url.drivername.split("+")[1] + + +def get_url_backend_name(url): + if "+" not in url.drivername: + return url.drivername + else: + return url.drivername.split("+")[0] diff --git a/py3/lib/python3.6/site-packages/alembic/testing/config.py b/py3/lib/python3.6/site-packages/alembic/testing/config.py new file mode 100644 index 0000000..7d7009e --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/config.py @@ -0,0 +1,91 @@ +# testing/config.py +# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +"""NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; + this should be removable when Alembic targets SQLAlchemy 1.0.0 +""" + +import collections + +requirements = None +db = None +db_url = None +db_opts = None +file_config = None +test_schema = None +test_schema_2 = None +_current = None + + +class Config(object): + def __init__(self, db, db_opts, options, file_config): + self._set_name(db) + self.db = db + self.db_opts = db_opts + self.options = options + self.file_config = file_config + self.test_schema = "test_schema" + self.test_schema_2 = "test_schema_2" + + _stack = collections.deque() + _configs = set() 
+ + def _set_name(self, db): + if db.dialect.server_version_info: + svi = ".".join(str(tok) for tok in db.dialect.server_version_info) + self.name = "%s+%s_[%s]" % (db.name, db.driver, svi) + else: + self.name = "%s+%s" % (db.name, db.driver) + + @classmethod + def register(cls, db, db_opts, options, file_config): + """add a config as one of the global configs. + + If there are no configs set up yet, this config also + gets set as the "_current". + """ + cfg = Config(db, db_opts, options, file_config) + cls._configs.add(cfg) + return cfg + + @classmethod + def set_as_current(cls, config): + global db, _current, db_url, test_schema, test_schema_2, db_opts + _current = config + db_url = config.db.url + db_opts = config.db_opts + test_schema = config.test_schema + test_schema_2 = config.test_schema_2 + db = config.db + + @classmethod + def push_engine(cls, db): + assert _current, "Can't push without a default Config set up" + cls.push( + Config( + db, _current.db_opts, _current.options, _current.file_config + ) + ) + + @classmethod + def push(cls, config): + cls._stack.append(_current) + cls.set_as_current(config) + + @classmethod + def reset(cls): + if cls._stack: + cls.set_as_current(cls._stack[0]) + cls._stack.clear() + + @classmethod + def all_configs(cls): + return cls._configs + + @classmethod + def all_dbs(cls): + for cfg in cls.all_configs(): + yield cfg.db diff --git a/py3/lib/python3.6/site-packages/alembic/testing/engines.py b/py3/lib/python3.6/site-packages/alembic/testing/engines.py new file mode 100644 index 0000000..68d0068 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/engines.py @@ -0,0 +1,27 @@ +# testing/engines.py +# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +"""NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; + this should be removable when Alembic targets SQLAlchemy 1.0.0. +""" + +from __future__ import absolute_import + +from . import config + + +def testing_engine(url=None, options=None): + """Produce an engine configured by --options with optional overrides.""" + + from sqlalchemy import create_engine + + url = url or config.db.url + if options is None: + options = config.db_opts + + engine = create_engine(url, **options) + + return engine diff --git a/py3/lib/python3.6/site-packages/alembic/testing/env.py b/py3/lib/python3.6/site-packages/alembic/testing/env.py new file mode 100644 index 0000000..04ebfe5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/env.py @@ -0,0 +1,496 @@ +#!coding: utf-8 + +import os +import shutil +import textwrap + +from . import engines +from . import provision +from .. import util +from ..script import Script +from ..script import ScriptDirectory +from ..util.compat import get_current_bytecode_suffixes +from ..util.compat import has_pep3147 +from ..util.compat import u + + +def _get_staging_directory(): + if provision.FOLLOWER_IDENT: + return "scratch_%s" % provision.FOLLOWER_IDENT + else: + return "scratch" + + +def staging_env(create=True, template="generic", sourceless=False): + from alembic import command, script + + cfg = _testing_config() + if create: + path = os.path.join(_get_staging_directory(), "scripts") + if os.path.exists(path): + shutil.rmtree(path) + command.init(cfg, path, template=template) + if sourceless: + try: + # do an import so that a .pyc/.pyo is generated. 
+ util.load_python_file(path, "env.py") + except AttributeError: + # we don't have the migration context set up yet + # so running the .env py throws this exception. + # theoretically we could be using py_compiler here to + # generate .pyc/.pyo without importing but not really + # worth it. + pass + assert sourceless in ( + "pep3147_envonly", + "simple", + "pep3147_everything", + ), sourceless + make_sourceless( + os.path.join(path, "env.py"), + "pep3147" if "pep3147" in sourceless else "simple", + ) + + sc = script.ScriptDirectory.from_config(cfg) + return sc + + +def clear_staging_env(): + shutil.rmtree(_get_staging_directory(), True) + + +def script_file_fixture(txt): + dir_ = os.path.join(_get_staging_directory(), "scripts") + path = os.path.join(dir_, "script.py.mako") + with open(path, "w") as f: + f.write(txt) + + +def env_file_fixture(txt): + dir_ = os.path.join(_get_staging_directory(), "scripts") + txt = ( + """ +from alembic import context + +config = context.config +""" + + txt + ) + + path = os.path.join(dir_, "env.py") + pyc_path = util.pyc_file_from_path(path) + if pyc_path: + os.unlink(pyc_path) + + with open(path, "w") as f: + f.write(txt) + + +def _sqlite_file_db(tempname="foo.db"): + dir_ = os.path.join(_get_staging_directory(), "scripts") + url = "sqlite:///%s/%s" % (dir_, tempname) + return engines.testing_engine(url=url) + + +def _sqlite_testing_config(sourceless=False): + dir_ = os.path.join(_get_staging_directory(), "scripts") + url = "sqlite:///%s/foo.db" % dir_ + + return _write_config_file( + """ +[alembic] +script_location = %s +sqlalchemy.url = %s +sourceless = %s + +[loggers] +keys = root + +[handlers] +keys = console + +[logger_root] +level = WARN +handlers = console +qualname = + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatters] +keys = generic + +[formatter_generic] +format = %%(levelname)-5.5s [%%(name)s] %%(message)s +datefmt = %%H:%%M:%%S + """ + % (dir_, url, "true" if sourceless else "false") + ) + + +def _multi_dir_testing_config(sourceless=False, extra_version_location=""): + dir_ = os.path.join(_get_staging_directory(), "scripts") + url = "sqlite:///%s/foo.db" % dir_ + + return _write_config_file( + """ +[alembic] +script_location = %s +sqlalchemy.url = %s +sourceless = %s +version_locations = %%(here)s/model1/ %%(here)s/model2/ %%(here)s/model3/ %s + +[loggers] +keys = root + +[handlers] +keys = console + +[logger_root] +level = WARN +handlers = console +qualname = + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatters] +keys = generic + +[formatter_generic] +format = %%(levelname)-5.5s [%%(name)s] %%(message)s +datefmt = %%H:%%M:%%S + """ + % ( + dir_, + url, + "true" if sourceless else "false", + extra_version_location, + ) + ) + + +def _no_sql_testing_config(dialect="postgresql", directives=""): + """use a postgresql url with no host so that + connections guaranteed to fail""" + dir_ = os.path.join(_get_staging_directory(), "scripts") + return _write_config_file( + """ +[alembic] +script_location = %s +sqlalchemy.url = %s:// +%s + +[loggers] +keys = root + +[handlers] +keys = console + +[logger_root] +level = WARN +handlers = console +qualname = + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatters] +keys = generic + +[formatter_generic] +format = %%(levelname)-5.5s [%%(name)s] %%(message)s +datefmt = %%H:%%M:%%S + +""" + % (dir_, dialect, directives) + ) 
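For reference, a minimal sketch of how an alembic.ini of the shape these fixtures write out (an [alembic] section with script_location and sqlalchemy.url, plus the logging sections) is driven through Alembic's programmatic API. The file name, paths and URL below are illustrative assumptions for the sketch, not values taken from this diff:

    from alembic import command
    from alembic.config import Config

    # parse an ini file shaped like the fixtures above produce;
    # "test_alembic.ini" is a hypothetical name used only for illustration
    cfg = Config("test_alembic.ini")

    # the same options can also be supplied without a file:
    # cfg.set_main_option("script_location", "scratch/scripts")
    # cfg.set_main_option("sqlalchemy.url", "sqlite:///scratch/scripts/foo.db")

    # assuming the script directory was already initialized
    # (e.g. command.init(cfg, "scratch/scripts"), as staging_env() does above)
    command.revision(cfg, message="empty revision")  # write a new revision file
    command.upgrade(cfg, "head")                      # run migrations up to head

command.upgrade ultimately executes the env.py found under script_location, which is how the template env.py files earlier in this diff pick up the configured URL and logging setup.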
+ + +def _write_config_file(text): + cfg = _testing_config() + with open(cfg.config_file_name, "w") as f: + f.write(text) + return cfg + + +def _testing_config(): + from alembic.config import Config + + if not os.access(_get_staging_directory(), os.F_OK): + os.mkdir(_get_staging_directory()) + return Config(os.path.join(_get_staging_directory(), "test_alembic.ini")) + + +def write_script( + scriptdir, rev_id, content, encoding="ascii", sourceless=False +): + old = scriptdir.revision_map.get_revision(rev_id) + path = old.path + + content = textwrap.dedent(content) + if encoding: + content = content.encode(encoding) + with open(path, "wb") as fp: + fp.write(content) + pyc_path = util.pyc_file_from_path(path) + if pyc_path: + os.unlink(pyc_path) + script = Script._from_path(scriptdir, path) + old = scriptdir.revision_map.get_revision(script.revision) + if old.down_revision != script.down_revision: + raise Exception( + "Can't change down_revision " "on a refresh operation." + ) + scriptdir.revision_map.add_revision(script, _replace=True) + + if sourceless: + make_sourceless( + path, "pep3147" if sourceless == "pep3147_everything" else "simple" + ) + + +def make_sourceless(path, style): + + import py_compile + + py_compile.compile(path) + + if style == "simple" and has_pep3147(): + pyc_path = util.pyc_file_from_path(path) + suffix = get_current_bytecode_suffixes()[0] + filepath, ext = os.path.splitext(path) + simple_pyc_path = filepath + suffix + shutil.move(pyc_path, simple_pyc_path) + pyc_path = simple_pyc_path + elif style == "pep3147" and not has_pep3147(): + raise NotImplementedError() + else: + assert style in ("pep3147", "simple") + pyc_path = util.pyc_file_from_path(path) + + assert os.access(pyc_path, os.F_OK) + + os.unlink(path) + + +def three_rev_fixture(cfg): + a = util.rev_id() + b = util.rev_id() + c = util.rev_id() + + script = ScriptDirectory.from_config(cfg) + script.generate_revision(a, "revision a", refresh=True) + write_script( + script, + a, + """\ +"Rev A" +revision = '%s' +down_revision = None + +from alembic import op + + +def upgrade(): + op.execute("CREATE STEP 1") + + +def downgrade(): + op.execute("DROP STEP 1") + +""" + % a, + ) + + script.generate_revision(b, "revision b", refresh=True) + write_script( + script, + b, + u( + """# coding: utf-8 +"Rev B, méil, %3" +revision = '{}' +down_revision = '{}' + +from alembic import op + + +def upgrade(): + op.execute("CREATE STEP 2") + + +def downgrade(): + op.execute("DROP STEP 2") + +""" + ).format(b, a), + encoding="utf-8", + ) + + script.generate_revision(c, "revision c", refresh=True) + write_script( + script, + c, + """\ +"Rev C" +revision = '%s' +down_revision = '%s' + +from alembic import op + + +def upgrade(): + op.execute("CREATE STEP 3") + + +def downgrade(): + op.execute("DROP STEP 3") + +""" + % (c, b), + ) + return a, b, c + + +def multi_heads_fixture(cfg, a, b, c): + """Create a multiple head fixture from the three-revs fixture""" + + d = util.rev_id() + e = util.rev_id() + f = util.rev_id() + + script = ScriptDirectory.from_config(cfg) + script.generate_revision( + d, "revision d from b", head=b, splice=True, refresh=True + ) + write_script( + script, + d, + """\ +"Rev D" +revision = '%s' +down_revision = '%s' + +from alembic import op + + +def upgrade(): + op.execute("CREATE STEP 4") + + +def downgrade(): + op.execute("DROP STEP 4") + +""" + % (d, b), + ) + + script.generate_revision( + e, "revision e from d", head=d, splice=True, refresh=True + ) + write_script( + script, + e, + """\ +"Rev E" +revision = 
'%s' +down_revision = '%s' + +from alembic import op + + +def upgrade(): + op.execute("CREATE STEP 5") + + +def downgrade(): + op.execute("DROP STEP 5") + +""" + % (e, d), + ) + + script.generate_revision( + f, "revision f from b", head=b, splice=True, refresh=True + ) + write_script( + script, + f, + """\ +"Rev F" +revision = '%s' +down_revision = '%s' + +from alembic import op + + +def upgrade(): + op.execute("CREATE STEP 6") + + +def downgrade(): + op.execute("DROP STEP 6") + +""" + % (f, b), + ) + + return d, e, f + + +def _multidb_testing_config(engines): + """alembic.ini fixture to work exactly with the 'multidb' template""" + + dir_ = os.path.join(_get_staging_directory(), "scripts") + + databases = ", ".join(engines.keys()) + engines = "\n\n".join( + "[%s]\n" "sqlalchemy.url = %s" % (key, value.url) + for key, value in engines.items() + ) + + return _write_config_file( + """ +[alembic] +script_location = %s +sourceless = false + +databases = %s + +%s +[loggers] +keys = root + +[handlers] +keys = console + +[logger_root] +level = WARN +handlers = console +qualname = + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatters] +keys = generic + +[formatter_generic] +format = %%(levelname)-5.5s [%%(name)s] %%(message)s +datefmt = %%H:%%M:%%S + """ + % (dir_, databases, engines) + ) diff --git a/py3/lib/python3.6/site-packages/alembic/testing/exclusions.py b/py3/lib/python3.6/site-packages/alembic/testing/exclusions.py new file mode 100644 index 0000000..af18593 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/exclusions.py @@ -0,0 +1,437 @@ +# testing/exclusions.py +# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +"""NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; + this should be removable when Alembic targets SQLAlchemy 1.0.0 +""" + + +import contextlib +import operator + +from sqlalchemy import util +from sqlalchemy.util import decorator + +from . 
import config +from .compat import get_url_backend_name +from .compat import get_url_driver_name +from .plugin.plugin_base import SkipTest +from ..util import compat + + +def skip_if(predicate, reason=None): + rule = compound() + pred = _as_predicate(predicate, reason) + rule.skips.add(pred) + return rule + + +def fails_if(predicate, reason=None): + rule = compound() + pred = _as_predicate(predicate, reason) + rule.fails.add(pred) + return rule + + +class compound(object): + def __init__(self): + self.fails = set() + self.skips = set() + self.tags = set() + + def __add__(self, other): + return self.add(other) + + def add(self, *others): + copy = compound() + copy.fails.update(self.fails) + copy.skips.update(self.skips) + copy.tags.update(self.tags) + for other in others: + copy.fails.update(other.fails) + copy.skips.update(other.skips) + copy.tags.update(other.tags) + return copy + + def not_(self): + copy = compound() + copy.fails.update(NotPredicate(fail) for fail in self.fails) + copy.skips.update(NotPredicate(skip) for skip in self.skips) + copy.tags.update(self.tags) + return copy + + @property + def enabled(self): + return self.enabled_for_config(config._current) + + def enabled_for_config(self, config): + for predicate in self.skips.union(self.fails): + if predicate(config): + return False + else: + return True + + def matching_config_reasons(self, config): + return [ + predicate._as_string(config) + for predicate in self.skips.union(self.fails) + if predicate(config) + ] + + def include_test(self, include_tags, exclude_tags): + return bool( + not self.tags.intersection(exclude_tags) + and (not include_tags or self.tags.intersection(include_tags)) + ) + + def _extend(self, other): + self.skips.update(other.skips) + self.fails.update(other.fails) + self.tags.update(other.tags) + + def __call__(self, fn): + if hasattr(fn, "_sa_exclusion_extend"): + fn._sa_exclusion_extend._extend(self) + return fn + + @decorator + def decorate(fn, *args, **kw): + return self._do(config._current, fn, *args, **kw) + + decorated = decorate(fn) + decorated._sa_exclusion_extend = self + return decorated + + @contextlib.contextmanager + def fail_if(self): + all_fails = compound() + all_fails.fails.update(self.skips.union(self.fails)) + + try: + yield + except Exception as ex: + all_fails._expect_failure(config._current, ex) + else: + all_fails._expect_success(config._current) + + def _do(self, config, fn, *args, **kw): + for skip in self.skips: + if skip(config): + msg = "'%s' : %s" % (fn.__name__, skip._as_string(config)) + raise SkipTest(msg) + + try: + return_value = fn(*args, **kw) + except Exception as ex: + self._expect_failure(config, ex, name=fn.__name__) + else: + self._expect_success(config, name=fn.__name__) + return return_value + + def _expect_failure(self, config, ex, name="block"): + for fail in self.fails: + if fail(config): + print( + ( + "%s failed as expected (%s): %s " + % (name, fail._as_string(config), str(ex)) + ) + ) + break + else: + compat.raise_from_cause(ex) + + def _expect_success(self, config, name="block"): + if not self.fails: + return + for fail in self.fails: + if not fail(config): + break + else: + raise AssertionError( + "Unexpected success for '%s' (%s)" + % ( + name, + " and ".join( + fail._as_string(config) for fail in self.fails + ), + ) + ) + + +def requires_tag(tagname): + return tags([tagname]) + + +def tags(tagnames): + comp = compound() + comp.tags.update(tagnames) + return comp + + +def only_if(predicate, reason=None): + predicate = _as_predicate(predicate) + 
return skip_if(NotPredicate(predicate), reason) + + +def succeeds_if(predicate, reason=None): + predicate = _as_predicate(predicate) + return fails_if(NotPredicate(predicate), reason) + + +class Predicate(object): + @classmethod + def as_predicate(cls, predicate, description=None): + if isinstance(predicate, compound): + return cls.as_predicate(predicate.fails.union(predicate.skips)) + + elif isinstance(predicate, Predicate): + if description and predicate.description is None: + predicate.description = description + return predicate + elif isinstance(predicate, (list, set)): + return OrPredicate( + [cls.as_predicate(pred) for pred in predicate], description + ) + elif isinstance(predicate, tuple): + return SpecPredicate(*predicate) + elif isinstance(predicate, compat.string_types): + tokens = predicate.split(" ", 2) + op = spec = None + db = tokens.pop(0) + if tokens: + op = tokens.pop(0) + if tokens: + spec = tuple(int(d) for d in tokens.pop(0).split(".")) + return SpecPredicate(db, op, spec, description=description) + elif util.callable(predicate): + return LambdaPredicate(predicate, description) + else: + assert False, "unknown predicate type: %s" % predicate + + def _format_description(self, config, negate=False): + bool_ = self(config) + if negate: + bool_ = not negate + return self.description % { + "driver": get_url_driver_name(config.db.url), + "database": get_url_backend_name(config.db.url), + "doesnt_support": "doesn't support" if bool_ else "does support", + "does_support": "does support" if bool_ else "doesn't support", + } + + def _as_string(self, config=None, negate=False): + raise NotImplementedError() + + +class BooleanPredicate(Predicate): + def __init__(self, value, description=None): + self.value = value + self.description = description or "boolean %s" % value + + def __call__(self, config): + return self.value + + def _as_string(self, config, negate=False): + return self._format_description(config, negate=negate) + + +class SpecPredicate(Predicate): + def __init__(self, db, op=None, spec=None, description=None): + self.db = db + self.op = op + self.spec = spec + self.description = description + + _ops = { + "<": operator.lt, + ">": operator.gt, + "==": operator.eq, + "!=": operator.ne, + "<=": operator.le, + ">=": operator.ge, + "in": operator.contains, + "between": lambda val, pair: val >= pair[0] and val <= pair[1], + } + + def __call__(self, config): + engine = config.db + + if "+" in self.db: + dialect, driver = self.db.split("+") + else: + dialect, driver = self.db, None + + if dialect and engine.name != dialect: + return False + if driver is not None and engine.driver != driver: + return False + + if self.op is not None: + assert driver is None, "DBAPI version specs not supported yet" + + version = _server_version(engine) + oper = ( + hasattr(self.op, "__call__") and self.op or self._ops[self.op] + ) + return oper(version, self.spec) + else: + return True + + def _as_string(self, config, negate=False): + if self.description is not None: + return self._format_description(config) + elif self.op is None: + if negate: + return "not %s" % self.db + else: + return "%s" % self.db + else: + if negate: + return "not %s %s %s" % (self.db, self.op, self.spec) + else: + return "%s %s %s" % (self.db, self.op, self.spec) + + +class LambdaPredicate(Predicate): + def __init__(self, lambda_, description=None, args=None, kw=None): + spec = compat.inspect_getargspec(lambda_) + if not spec[0]: + self.lambda_ = lambda db: lambda_() + else: + self.lambda_ = lambda_ + self.args = args 
or () + self.kw = kw or {} + if description: + self.description = description + elif lambda_.__doc__: + self.description = lambda_.__doc__ + else: + self.description = "custom function" + + def __call__(self, config): + return self.lambda_(config) + + def _as_string(self, config, negate=False): + return self._format_description(config) + + +class NotPredicate(Predicate): + def __init__(self, predicate, description=None): + self.predicate = predicate + self.description = description + + def __call__(self, config): + return not self.predicate(config) + + def _as_string(self, config, negate=False): + if self.description: + return self._format_description(config, not negate) + else: + return self.predicate._as_string(config, not negate) + + +class OrPredicate(Predicate): + def __init__(self, predicates, description=None): + self.predicates = predicates + self.description = description + + def __call__(self, config): + for pred in self.predicates: + if pred(config): + return True + return False + + def _eval_str(self, config, negate=False): + if negate: + conjunction = " and " + else: + conjunction = " or " + return conjunction.join( + p._as_string(config, negate=negate) for p in self.predicates + ) + + def _negation_str(self, config): + if self.description is not None: + return "Not " + self._format_description(config) + else: + return self._eval_str(config, negate=True) + + def _as_string(self, config, negate=False): + if negate: + return self._negation_str(config) + else: + if self.description is not None: + return self._format_description(config) + else: + return self._eval_str(config) + + +_as_predicate = Predicate.as_predicate + + +def _is_excluded(db, op, spec): + return SpecPredicate(db, op, spec)(config._current) + + +def _server_version(engine): + """Return a server_version_info tuple.""" + + # force metadata to be retrieved + conn = engine.connect() + version = getattr(engine.dialect, "server_version_info", ()) + conn.close() + return version + + +def db_spec(*dbs): + return OrPredicate([Predicate.as_predicate(db) for db in dbs]) + + +def open(): # noqa + return skip_if(BooleanPredicate(False, "mark as execute")) + + +def closed(): + return skip_if(BooleanPredicate(True, "marked as skip")) + + +def fails(msg=None): + return fails_if(BooleanPredicate(True, msg or "expected to fail")) + + +@decorator +def future(fn, *arg): + return fails_if(LambdaPredicate(fn), "Future feature") + + +def fails_on(db, reason=None): + return fails_if(SpecPredicate(db), reason) + + +def fails_on_everything_except(*dbs): + return succeeds_if(OrPredicate([Predicate.as_predicate(db) for db in dbs])) + + +def skip(db, reason=None): + return skip_if(SpecPredicate(db), reason) + + +def only_on(dbs, reason=None): + return only_if( + OrPredicate([Predicate.as_predicate(db) for db in util.to_list(dbs)]) + ) + + +def exclude(db, op, spec, reason=None): + return skip_if(SpecPredicate(db, op, spec), reason) + + +def against(config, *queries): + assert queries, "no queries sent!" 
+ return OrPredicate([Predicate.as_predicate(query) for query in queries])( + config + ) diff --git a/py3/lib/python3.6/site-packages/alembic/testing/fixtures.py b/py3/lib/python3.6/site-packages/alembic/testing/fixtures.py new file mode 100644 index 0000000..6243467 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/fixtures.py @@ -0,0 +1,277 @@ +# coding: utf-8 +from contextlib import contextmanager +import io +import re + +from sqlalchemy import Column +from sqlalchemy import create_engine +from sqlalchemy import inspect +from sqlalchemy import MetaData +from sqlalchemy import String +from sqlalchemy import Table +from sqlalchemy import text + +import alembic +from . import config +from . import mock +from .assertions import _get_dialect +from .assertions import eq_ +from .plugin.plugin_base import SkipTest +from .. import util +from ..environment import EnvironmentContext +from ..migration import MigrationContext +from ..operations import Operations +from ..util.compat import configparser +from ..util.compat import string_types +from ..util.compat import text_type + +testing_config = configparser.ConfigParser() +testing_config.read(["test.cfg"]) + + +if not util.sqla_094: + + class TestBase(object): + # A sequence of database names to always run, regardless of the + # constraints below. + __whitelist__ = () + + # A sequence of requirement names matching testing.requires decorators + __requires__ = () + + # A sequence of dialect names to exclude from the test class. + __unsupported_on__ = () + + # If present, test class is only runnable for the *single* specified + # dialect. If you need multiple, use __unsupported_on__ and invert. + __only_on__ = None + + # A sequence of no-arg callables. If any are True, the entire + # testcase is skipped. 
+ __skip_if__ = None + + def assert_(self, val, msg=None): + assert val, msg + + # apparently a handful of tests are doing this....OK + def setup(self): + if hasattr(self, "setUp"): + self.setUp() + + def teardown(self): + if hasattr(self, "tearDown"): + self.tearDown() + + +else: + from sqlalchemy.testing.fixtures import TestBase # noqa + + +def capture_db(): + buf = [] + + def dump(sql, *multiparams, **params): + buf.append(str(sql.compile(dialect=engine.dialect))) + + engine = create_engine("postgresql://", strategy="mock", executor=dump) + return engine, buf + + +_engs = {} + + +@contextmanager +def capture_context_buffer(**kw): + if kw.pop("bytes_io", False): + buf = io.BytesIO() + else: + buf = io.StringIO() + + kw.update({"dialect_name": "sqlite", "output_buffer": buf}) + conf = EnvironmentContext.configure + + def configure(*arg, **opt): + opt.update(**kw) + return conf(*arg, **opt) + + with mock.patch.object(EnvironmentContext, "configure", configure): + yield buf + + +def op_fixture( + dialect="default", + as_sql=False, + naming_convention=None, + literal_binds=False, + native_boolean=None, +): + + opts = {} + if naming_convention: + if not util.sqla_092: + raise SkipTest( + "naming_convention feature requires " "sqla 0.9.2 or greater" + ) + opts["target_metadata"] = MetaData(naming_convention=naming_convention) + + class buffer_(object): + def __init__(self): + self.lines = [] + + def write(self, msg): + msg = msg.strip() + msg = re.sub(r"[\n\t]", "", msg) + if as_sql: + # the impl produces soft tabs, + # so search for blocks of 4 spaces + msg = re.sub(r" ", "", msg) + msg = re.sub(r"\;\n*$", "", msg) + + self.lines.append(msg) + + def flush(self): + pass + + buf = buffer_() + + class ctx(MigrationContext): + def clear_assertions(self): + buf.lines[:] = [] + + def assert_(self, *sql): + # TODO: make this more flexible about + # whitespace and such + eq_(buf.lines, list(sql)) + + def assert_contains(self, sql): + for stmt in buf.lines: + if sql in stmt: + return + else: + assert False, "Could not locate fragment %r in %r" % ( + sql, + buf.lines, + ) + + if as_sql: + opts["as_sql"] = as_sql + if literal_binds: + opts["literal_binds"] = literal_binds + if dialect == "mariadb": + ctx_dialect = _get_dialect("mysql") + ctx_dialect.server_version_info = (10, 0, 0, "MariaDB") + + else: + ctx_dialect = _get_dialect(dialect) + if native_boolean is not None: + ctx_dialect.supports_native_boolean = native_boolean + # this is new as of SQLAlchemy 1.2.7 and is used by SQL Server, + # which breaks assumptions in the alembic test suite + ctx_dialect.non_native_boolean_check_constraint = True + if not as_sql: + + def execute(stmt, *multiparam, **param): + if isinstance(stmt, string_types): + stmt = text(stmt) + assert stmt.supports_execution + sql = text_type(stmt.compile(dialect=ctx_dialect)) + + buf.write(sql) + + connection = mock.Mock(dialect=ctx_dialect, execute=execute) + else: + opts["output_buffer"] = buf + connection = None + context = ctx(ctx_dialect, connection, opts) + + alembic.op._proxy = Operations(context) + return context + + +class AlterColRoundTripFixture(object): + + # since these tests are about syntax, use more recent SQLAlchemy as some of + # the type / server default compare logic might not work on older + # SQLAlchemy versions as seems to be the case for SQLAlchemy 1.1 on Oracle + + __requires__ = ("alter_column", "sqlalchemy_12") + + def setUp(self): + self.conn = config.db.connect() + self.ctx = MigrationContext.configure(self.conn) + self.op = Operations(self.ctx) + 
self.metadata = MetaData() + + def _compare_type(self, t1, t2): + c1 = Column("q", t1) + c2 = Column("q", t2) + assert not self.ctx.impl.compare_type( + c1, c2 + ), "Type objects %r and %r didn't compare as equivalent" % (t1, t2) + + def _compare_server_default(self, t1, s1, t2, s2): + c1 = Column("q", t1, server_default=s1) + c2 = Column("q", t2, server_default=s2) + assert not self.ctx.impl.compare_server_default( + c1, c2, s2, s1 + ), "server defaults %r and %r didn't compare as equivalent" % (s1, s2) + + def tearDown(self): + self.metadata.drop_all(self.conn) + self.conn.close() + + def _run_alter_col(self, from_, to_, compare=None): + column = Column( + from_.get("name", "colname"), + from_.get("type", String(10)), + nullable=from_.get("nullable", True), + server_default=from_.get("server_default", None), + # comment=from_.get("comment", None) + ) + t = Table("x", self.metadata, column) + + t.create(self.conn) + insp = inspect(self.conn) + old_col = insp.get_columns("x")[0] + + # TODO: conditional comment support + self.op.alter_column( + "x", + column.name, + existing_type=column.type, + existing_server_default=column.server_default + if column.server_default is not None + else False, + existing_nullable=True if column.nullable else False, + # existing_comment=column.comment, + nullable=to_.get("nullable", None), + # modify_comment=False, + server_default=to_.get("server_default", False), + new_column_name=to_.get("name", None), + type_=to_.get("type", None), + ) + + insp = inspect(self.conn) + new_col = insp.get_columns("x")[0] + + if compare is None: + compare = to_ + + eq_( + new_col["name"], + compare["name"] if "name" in compare else column.name, + ) + self._compare_type( + new_col["type"], compare.get("type", old_col["type"]) + ) + eq_(new_col["nullable"], compare.get("nullable", column.nullable)) + self._compare_server_default( + new_col["type"], + new_col.get("default", None), + compare.get("type", old_col["type"]), + compare["server_default"].text + if "server_default" in compare + else column.server_default.arg.text + if column.server_default is not None + else None, + ) diff --git a/py3/lib/python3.6/site-packages/alembic/testing/mock.py b/py3/lib/python3.6/site-packages/alembic/testing/mock.py new file mode 100644 index 0000000..c0c38a0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/mock.py @@ -0,0 +1,29 @@ +# testing/mock.py +# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Import stub for mock library. + + .. note:: + + copied/adapted from SQLAlchemy master for backwards compatibility; + this should be removable when Alembic targets SQLAlchemy 1.0.0 + +""" +from __future__ import absolute_import + +from ..util.compat import py3k + +if py3k: + from unittest.mock import MagicMock, Mock, call, patch, ANY +else: + try: + from mock import MagicMock, Mock, call, patch, ANY # noqa + except ImportError: + raise ImportError( + "SQLAlchemy's test suite requires the " + "'mock' library as of 0.8.2." 
+ ) diff --git a/py3/lib/python3.6/site-packages/alembic/testing/plugin/__init__.py b/py3/lib/python3.6/site-packages/alembic/testing/plugin/__init__.py new file mode 100644 index 0000000..98616f4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/plugin/__init__.py @@ -0,0 +1,3 @@ +"""NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; + this should be removable when Alembic targets SQLAlchemy 1.0.0 +""" diff --git a/py3/lib/python3.6/site-packages/alembic/testing/plugin/bootstrap.py b/py3/lib/python3.6/site-packages/alembic/testing/plugin/bootstrap.py new file mode 100644 index 0000000..4bd415d --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/plugin/bootstrap.py @@ -0,0 +1,47 @@ +""" +Bootstrapper for nose/pytest plugins. + +The entire rationale for this system is to get the modules in plugin/ +imported without importing all of the supporting library, so that we can +set up things for testing before coverage starts. + +The rationale for all of plugin/ being *in* the supporting library in the +first place is so that the testing and plugin suite is available to other +libraries, mainly external SQLAlchemy and Alembic dialects, to make use +of the same test environment and standard suites available to +SQLAlchemy/Alembic themselves without the need to ship/install a separate +package outside of SQLAlchemy. + +NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; +this should be removable when Alembic targets SQLAlchemy 1.0.0. + +""" + +import os +import sys + +bootstrap_file = locals()["bootstrap_file"] +to_bootstrap = locals()["to_bootstrap"] + + +def load_file_as_module(name): + path = os.path.join(os.path.dirname(bootstrap_file), "%s.py" % name) + if sys.version_info.major >= 3: + from importlib import machinery + + mod = machinery.SourceFileLoader(name, path).load_module() + else: + import imp + + mod = imp.load_source(name, path) + return mod + + +if to_bootstrap == "pytest": + sys.modules["alembic_plugin_base"] = load_file_as_module("plugin_base") + sys.modules["alembic_pytestplugin"] = load_file_as_module("pytestplugin") +elif to_bootstrap == "nose": + sys.modules["alembic_plugin_base"] = load_file_as_module("plugin_base") + sys.modules["alembic_noseplugin"] = load_file_as_module("noseplugin") +else: + raise Exception("unknown bootstrap: %s" % to_bootstrap) # noqa diff --git a/py3/lib/python3.6/site-packages/alembic/testing/plugin/noseplugin.py b/py3/lib/python3.6/site-packages/alembic/testing/plugin/noseplugin.py new file mode 100644 index 0000000..fafb9e1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/plugin/noseplugin.py @@ -0,0 +1,106 @@ +# plugin/noseplugin.py +# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +Enhance nose with extra options and behaviors for running SQLAlchemy tests. + + +NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; +this should be removable when Alembic targets SQLAlchemy 1.0.0. + +""" + +try: + # installed by bootstrap.py + import alembic_plugin_base as plugin_base +except ImportError: + # assume we're a package, use traditional import + from . 
import plugin_base + +import os +import sys + +from nose.plugins import Plugin + +fixtures = None + +py3k = sys.version_info.major >= 3 + + +class NoseSQLAlchemy(Plugin): + enabled = True + + name = "sqla_testing" + score = 100 + + def options(self, parser, env=os.environ): + Plugin.options(self, parser, env) + opt = parser.add_option + + def make_option(name, **kw): + callback_ = kw.pop("callback", None) + if callback_: + + def wrap_(option, opt_str, value, parser): + callback_(opt_str, value, parser) + + kw["callback"] = wrap_ + opt(name, **kw) + + plugin_base.setup_options(make_option) + plugin_base.read_config() + + def configure(self, options, conf): + super(NoseSQLAlchemy, self).configure(options, conf) + plugin_base.pre_begin(options) + + plugin_base.set_coverage_flag(options.enable_plugin_coverage) + + def begin(self): + global fixtures + from alembic.testing import fixtures # noqa + + plugin_base.post_begin() + + def describeTest(self, test): + return "" + + def wantFunction(self, fn): + return False + + def wantMethod(self, fn): + if py3k: + if not hasattr(fn.__self__, "cls"): + return False + cls = fn.__self__.cls + else: + cls = fn.im_class + return plugin_base.want_method(cls, fn) + + def wantClass(self, cls): + return plugin_base.want_class(cls) + + def beforeTest(self, test): + plugin_base.before_test( + test, + test.test.cls.__module__, + test.test.cls, + test.test.method.__name__, + ) + + def afterTest(self, test): + plugin_base.after_test(test) + + def startContext(self, ctx): + if not isinstance(ctx, type) or not issubclass(ctx, fixtures.TestBase): + return + plugin_base.start_test_class(ctx) + + def stopContext(self, ctx): + if not isinstance(ctx, type) or not issubclass(ctx, fixtures.TestBase): + return + plugin_base.stop_test_class(ctx) diff --git a/py3/lib/python3.6/site-packages/alembic/testing/plugin/plugin_base.py b/py3/lib/python3.6/site-packages/alembic/testing/plugin/plugin_base.py new file mode 100644 index 0000000..44930c8 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/plugin/plugin_base.py @@ -0,0 +1,645 @@ +# plugin/plugin_base.py +# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +"""Testing extensions. + +this module is designed to work as a testing-framework-agnostic library, +so that we can continue to support nose and also begin adding new +functionality via py.test. + +NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; +this should be removable when Alembic targets SQLAlchemy 1.0.0 + + +""" + +from __future__ import absolute_import + +import re +import sys + +try: + # unitttest has a SkipTest also but pytest doesn't + # honor it unless nose is imported too... 
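
The NoseSQLAlchemy plugin above is not auto-discovered; it has to be handed to nose explicitly. The runner module added later in this same diff wires it up like this:

    import nose
    from alembic.testing.plugin.noseplugin import NoseSQLAlchemy

    nose.main(addplugins=[NoseSQLAlchemy()])
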
+ from nose import SkipTest +except ImportError: + from pytest import skip + + SkipTest = skip.Exception + +py3k = sys.version_info.major >= 3 + +if py3k: + import configparser +else: + import ConfigParser as configparser + +# late imports +fixtures = None +engines = None +provision = None +exclusions = None +warnings = None +assertions = None +requirements = None +config = None +util = None +file_config = None + + +logging = None +include_tags = set() +exclude_tags = set() +options = None + + +def setup_options(make_option): + make_option( + "--log-info", + action="callback", + type="string", + callback=_log, + help="turn on info logging for (multiple OK)", + ) + make_option( + "--log-debug", + action="callback", + type="string", + callback=_log, + help="turn on debug logging for (multiple OK)", + ) + make_option( + "--db", + action="append", + type="string", + dest="db", + help="Use prefab database uri. Multiple OK, " + "first one is run by default.", + ) + make_option( + "--dbs", + action="callback", + zeroarg_callback=_list_dbs, + help="List available prefab dbs", + ) + make_option( + "--dburi", + action="append", + type="string", + dest="dburi", + help="Database uri. Multiple OK, " "first one is run by default.", + ) + make_option( + "--dropfirst", + action="store_true", + dest="dropfirst", + help="Drop all tables in the target database first", + ) + make_option( + "--backend-only", + action="store_true", + dest="backend_only", + help="Run only tests marked with __backend__", + ) + make_option( + "--postgresql-templatedb", + type="string", + help="name of template database to use for Postgresql " + "CREATE DATABASE (defaults to current database)", + ) + make_option( + "--low-connections", + action="store_true", + dest="low_connections", + help="Use a low number of distinct connections - " + "i.e. for Oracle TNS", + ) + make_option( + "--write-idents", + type="string", + dest="write_idents", + help="write out generated follower idents to , " + "when -n is used", + ) + make_option( + "--reversetop", + action="store_true", + dest="reversetop", + default=False, + help="Use a random-ordering set implementation in the ORM " + "(helps reveal dependency issues)", + ) + make_option( + "--requirements", + action="callback", + type="string", + callback=_requirements_opt, + help="requirements class for testing, overrides setup.cfg", + ) + make_option( + "--with-cdecimal", + action="store_true", + dest="cdecimal", + default=False, + help="Monkeypatch the cdecimal library into Python 'decimal' " + "for all tests", + ) + make_option( + "--include-tag", + action="callback", + callback=_include_tag, + type="string", + help="Include tests with tag ", + ) + make_option( + "--exclude-tag", + action="callback", + callback=_exclude_tag, + type="string", + help="Exclude tests with tag ", + ) + make_option( + "--mysql-engine", + action="store", + dest="mysql_engine", + default=None, + help="Use the specified MySQL storage engine for all tables, " + "default is a db-default/InnoDB combo.", + ) + + +def configure_follower(follower_ident): + """Configure required state for a follower. + + This invokes in the parent process and typically includes + database creation. + + """ + from alembic.testing import provision + + provision.FOLLOWER_IDENT = follower_ident + + +def memoize_important_follower_config(dict_): + """Store important configuration we will need to send to a follower. + + This invokes in the parent process after normal config is set up. 
+ + This is necessary as py.test seems to not be using forking, so we + start with nothing in memory, *but* it isn't running our argparse + callables, so we have to just copy all of that over. + + """ + dict_["memoized_config"] = { + "include_tags": include_tags, + "exclude_tags": exclude_tags, + } + + +def restore_important_follower_config(dict_): + """Restore important configuration needed by a follower. + + This invokes in the follower process. + + """ + include_tags.update(dict_["memoized_config"]["include_tags"]) + exclude_tags.update(dict_["memoized_config"]["exclude_tags"]) + + +def read_config(): + global file_config + file_config = configparser.ConfigParser() + file_config.read(["setup.cfg", "test.cfg"]) + + +def pre_begin(opt): + """things to set up early, before coverage might be setup.""" + global options + options = opt + for fn in pre_configure: + fn(options, file_config) + + +def set_coverage_flag(value): + options.has_coverage = value + + +def post_begin(): + """things to set up later, once we know coverage is running.""" + + # Lazy setup of other options (post coverage) + for fn in post_configure: + fn(options, file_config) + + # late imports, has to happen after config as well + # as nose plugins like coverage + global util, fixtures, engines, exclusions, assertions + global warnings, profiling, config, testing + from alembic.testing import config, warnings, exclusions # noqa + from alembic.testing import engines, fixtures # noqa + from sqlalchemy import util # noqa + + warnings.setup_filters() + + +def _log(opt_str, value, parser): + global logging + if not logging: + import logging + + logging.basicConfig() + + if opt_str.endswith("-info"): + logging.getLogger(value).setLevel(logging.INFO) + elif opt_str.endswith("-debug"): + logging.getLogger(value).setLevel(logging.DEBUG) + + +def _list_dbs(*args): + print("Available --db options (use --dburi to override)") + for macro in sorted(file_config.options("db")): + print("%20s\t%s" % (macro, file_config.get("db", macro))) + sys.exit(0) + + +def _requirements_opt(opt_str, value, parser): + _setup_requirements(value) + + +def _exclude_tag(opt_str, value, parser): + exclude_tags.add(value.replace("-", "_")) + + +def _include_tag(opt_str, value, parser): + include_tags.add(value.replace("-", "_")) + + +pre_configure = [] +post_configure = [] + + +def pre(fn): + pre_configure.append(fn) + return fn + + +def post(fn): + post_configure.append(fn) + return fn + + +@pre +def _setup_options(opt, file_config): + global options + options = opt + + +@pre +def _monkeypatch_cdecimal(options, file_config): + if options.cdecimal: + import cdecimal + + sys.modules["decimal"] = cdecimal + + +@post +def _engine_uri(options, file_config): + from alembic.testing import config + from alembic.testing import provision + + if options.dburi: + db_urls = list(options.dburi) + else: + db_urls = [] + + if options.db: + for db_token in options.db: + for db in re.split(r"[,\s]+", db_token): + if db not in file_config.options("db"): + raise RuntimeError( + "Unknown URI specifier '%s'. " + "Specify --dbs for known uris." 
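
read_config() above loads setup.cfg/test.cfg into file_config, and _engine_uri()/_setup_requirements() then resolve the [db] aliases and the [sqla_testing] requirement_cls entry from it. A small sketch of a configuration in that shape, parsed the same way (the URIs and class path are illustrative):

    import configparser

    SAMPLE_CFG = """\
    [db]
    default = sqlite:///:memory:
    sqlite_file = sqlite:///querytest.db

    [sqla_testing]
    requirement_cls = tests.requirements:DefaultRequirements
    """

    file_config = configparser.ConfigParser()
    file_config.read_string(SAMPLE_CFG)

    # "--db sqlite_file" on the command line resolves through [db]:
    print(file_config.get("db", "sqlite_file"))
    # and _setup_requirements() splits requirement_cls on ":" to import it
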
% db + ) + else: + db_urls.append(file_config.get("db", db)) + + if not db_urls: + db_urls.append(file_config.get("db", "default")) + + for db_url in db_urls: + + if options.write_idents and provision.FOLLOWER_IDENT: # != 'master': + with open(options.write_idents, "a") as file_: + file_.write(provision.FOLLOWER_IDENT + " " + db_url + "\n") + + cfg = provision.setup_config( + db_url, options, file_config, provision.FOLLOWER_IDENT + ) + + if not config._current: + cfg.set_as_current(cfg) + + +@post +def _requirements(options, file_config): + + requirement_cls = file_config.get("sqla_testing", "requirement_cls") + _setup_requirements(requirement_cls) + + +def _setup_requirements(argument): + from alembic.testing import config + + if config.requirements is not None: + return + + modname, clsname = argument.split(":") + + # importlib.import_module() only introduced in 2.7, a little + # late + mod = __import__(modname) + for component in modname.split(".")[1:]: + mod = getattr(mod, component) + req_cls = getattr(mod, clsname) + + config.requirements = req_cls() + + +@post +def _prep_testing_database(options, file_config): + from alembic.testing import config + from alembic.testing.exclusions import against + from sqlalchemy import schema + from alembic import util + + from sqlalchemy import inspect + + if options.dropfirst: + for cfg in config.Config.all_configs(): + e = cfg.db + inspector = inspect(e) + try: + view_names = inspector.get_view_names() + except NotImplementedError: + pass + else: + for vname in view_names: + e.execute( + schema._DropView( + schema.Table(vname, schema.MetaData()) + ) + ) + + if config.requirements.schemas.enabled_for_config(cfg): + try: + view_names = inspector.get_view_names(schema="test_schema") + except NotImplementedError: + pass + else: + for vname in view_names: + e.execute( + schema._DropView( + schema.Table( + vname, + schema.MetaData(), + schema="test_schema", + ) + ) + ) + + for tname in reversed( + inspector.get_table_names(order_by="foreign_key") + ): + e.execute( + schema.DropTable(schema.Table(tname, schema.MetaData())) + ) + + if config.requirements.schemas.enabled_for_config(cfg): + for tname in reversed( + inspector.get_table_names( + order_by="foreign_key", schema="test_schema" + ) + ): + e.execute( + schema.DropTable( + schema.Table( + tname, schema.MetaData(), schema="test_schema" + ) + ) + ) + + if against(cfg, "postgresql") and util.sqla_100: + from sqlalchemy.dialects import postgresql + + for enum in inspector.get_enums("*"): + e.execute( + postgresql.DropEnumType( + postgresql.ENUM( + name=enum["name"], schema=enum["schema"] + ) + ) + ) + + +@post +def _reverse_topological(options, file_config): + if options.reversetop: + from sqlalchemy.orm.util import randomize_unitofwork + + randomize_unitofwork() + + +@post +def _post_setup_options(opt, file_config): + from alembic.testing import config + + config.options = options + config.file_config = file_config + + +def want_class(cls): + if not issubclass(cls, fixtures.TestBase): + return False + elif cls.__name__.startswith("_"): + return False + elif config.options.backend_only and not getattr( + cls, "__backend__", False + ): + return False + else: + return True + + +def want_method(cls, fn): + if not fn.__name__.startswith("test_"): + return False + elif fn.__module__ is None: + return False + elif include_tags: + return ( + hasattr(cls, "__tags__") + and exclusions.tags(cls.__tags__).include_test( + include_tags, exclude_tags + ) + ) or ( + hasattr(fn, "_sa_exclusion_extend") + and 
fn._sa_exclusion_extend.include_test( + include_tags, exclude_tags + ) + ) + elif exclude_tags and hasattr(cls, "__tags__"): + return exclusions.tags(cls.__tags__).include_test( + include_tags, exclude_tags + ) + elif exclude_tags and hasattr(fn, "_sa_exclusion_extend"): + return fn._sa_exclusion_extend.include_test(include_tags, exclude_tags) + else: + return True + + +def generate_sub_tests(cls, module): + if getattr(cls, "__backend__", False): + for cfg in _possible_configs_for_cls(cls): + orig_name = cls.__name__ + + # we can have special chars in these names except for the + # pytest junit plugin, which is tripped up by the brackets + # and periods, so sanitize + + alpha_name = re.sub(r"[_\[\]\.]+", "_", cfg.name) + alpha_name = re.sub("_+$", "", alpha_name) + name = "%s_%s" % (cls.__name__, alpha_name) + + subcls = type( + name, + (cls,), + {"_sa_orig_cls_name": orig_name, "__only_on_config__": cfg}, + ) + setattr(module, name, subcls) + yield subcls + else: + yield cls + + +def start_test_class(cls): + _do_skips(cls) + _setup_engine(cls) + + +def stop_test_class(cls): + # from sqlalchemy import inspect + # assert not inspect(testing.db).get_table_names() + _restore_engine() + + +def _restore_engine(): + config._current.reset() + + +def _setup_engine(cls): + if getattr(cls, "__engine_options__", None): + eng = engines.testing_engine(options=cls.__engine_options__) + config._current.push_engine(eng) + + +def before_test(test, test_module_name, test_class, test_name): + pass + + +def after_test(test): + pass + + +def _possible_configs_for_cls(cls, reasons=None): + all_configs = set(config.Config.all_configs()) + + if cls.__unsupported_on__: + spec = exclusions.db_spec(*cls.__unsupported_on__) + for config_obj in list(all_configs): + if spec(config_obj): + all_configs.remove(config_obj) + + if getattr(cls, "__only_on__", None): + spec = exclusions.db_spec(*util.to_list(cls.__only_on__)) + for config_obj in list(all_configs): + if not spec(config_obj): + all_configs.remove(config_obj) + + if getattr(cls, "__only_on_config__", None): + all_configs.intersection_update([cls.__only_on_config__]) + + if hasattr(cls, "__requires__"): + requirements = config.requirements + for config_obj in list(all_configs): + for requirement in cls.__requires__: + check = getattr(requirements, requirement) + + skip_reasons = check.matching_config_reasons(config_obj) + if skip_reasons: + all_configs.remove(config_obj) + if reasons is not None: + reasons.extend(skip_reasons) + break + + if hasattr(cls, "__prefer_requires__"): + non_preferred = set() + requirements = config.requirements + for config_obj in list(all_configs): + for requirement in cls.__prefer_requires__: + check = getattr(requirements, requirement) + + if not check.enabled_for_config(config_obj): + non_preferred.add(config_obj) + if all_configs.difference(non_preferred): + all_configs.difference_update(non_preferred) + + return all_configs + + +def _do_skips(cls): + reasons = [] + all_configs = _possible_configs_for_cls(cls, reasons) + + if getattr(cls, "__skip_if__", False): + for c in getattr(cls, "__skip_if__"): + if c(): + raise SkipTest( + "'%s' skipped by %s" % (cls.__name__, c.__name__) + ) + + if not all_configs: + msg = "'%s' unsupported on any DB implementation %s%s" % ( + cls.__name__, + ", ".join( + "'%s(%s)+%s'" + % ( + config_obj.db.name, + ".".join( + str(dig) + for dig in config_obj.db.dialect.server_version_info + ), + config_obj.db.driver, + ) + for config_obj in config.Config.all_configs() + ), + ", ".join(reasons), + ) + raise 
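
want_class()/want_method() and generate_sub_tests() above key off a handful of class-level markers rather than decorators. A sketch of a suite class using them; the class body is illustrative, and "unique_constraint_reflection" is one of the properties defined in requirements.py later in this diff:

    from alembic.testing import fixtures

    class BackendRoundTripTest(fixtures.TestBase):
        # expanded by generate_sub_tests() into one subclass per
        # configured database, with the sanitized config name appended
        __backend__ = True

        # restrict to particular dialects / named requirements;
        # _possible_configs_for_cls() filters configurations accordingly
        __only_on__ = "postgresql"
        __requires__ = ("unique_constraint_reflection",)

        def test_round_trip(self):
            pass
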
SkipTest(msg) + elif hasattr(cls, "__prefer_backends__"): + non_preferred = set() + spec = exclusions.db_spec(*util.to_list(cls.__prefer_backends__)) + for config_obj in all_configs: + if not spec(config_obj): + non_preferred.add(config_obj) + if all_configs.difference(non_preferred): + all_configs.difference_update(non_preferred) + + if config._current not in all_configs: + _setup_config(all_configs.pop(), cls) + + +def _setup_config(config_obj, ctx): + config._current.push(config_obj) diff --git a/py3/lib/python3.6/site-packages/alembic/testing/plugin/pytestplugin.py b/py3/lib/python3.6/site-packages/alembic/testing/plugin/pytestplugin.py new file mode 100644 index 0000000..5a3bb73 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/plugin/pytestplugin.py @@ -0,0 +1,235 @@ +"""NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; + this should be removable when Alembic targets SQLAlchemy 1.0.0. +""" + +try: + # installed by bootstrap.py + import alembic_plugin_base as plugin_base +except ImportError: + # assume we're a package, use traditional import + from . import plugin_base + +import argparse +import collections +import inspect +import os +import sys + +import pytest + +py3k = sys.version_info.major >= 3 + +try: + import xdist # noqa + + has_xdist = True +except ImportError: + has_xdist = False + + +def pytest_addoption(parser): + group = parser.getgroup("sqlalchemy") + + def make_option(name, **kw): + callback_ = kw.pop("callback", None) + if callback_: + + class CallableAction(argparse.Action): + def __call__( + self, parser, namespace, values, option_string=None + ): + callback_(option_string, values, parser) + + kw["action"] = CallableAction + + zeroarg_callback = kw.pop("zeroarg_callback", None) + if zeroarg_callback: + + class CallableAction(argparse.Action): + def __init__( + self, + option_strings, + dest, + default=False, + required=False, + help=None, # noqa + ): + super(CallableAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + const=True, + default=default, + required=required, + help=help, + ) + + def __call__( + self, parser, namespace, values, option_string=None + ): + zeroarg_callback(option_string, values, parser) + + kw["action"] = CallableAction + + group.addoption(name, **kw) + + plugin_base.setup_options(make_option) + plugin_base.read_config() + + +def pytest_configure(config): + if hasattr(config, "slaveinput"): + plugin_base.restore_important_follower_config(config.slaveinput) + plugin_base.configure_follower(config.slaveinput["follower_ident"]) + else: + if config.option.write_idents and os.path.exists( + config.option.write_idents + ): + os.remove(config.option.write_idents) + + plugin_base.pre_begin(config.option) + + plugin_base.set_coverage_flag( + bool(getattr(config.option, "cov_source", False)) + ) + + +def pytest_sessionstart(session): + plugin_base.post_begin() + + +if has_xdist: + import uuid + + def pytest_configure_node(node): + # the master for each node fills slaveinput dictionary + # which pytest-xdist will transfer to the subprocess + + plugin_base.memoize_important_follower_config(node.slaveinput) + + node.slaveinput["follower_ident"] = "test_%s" % uuid.uuid4().hex[0:12] + from alembic.testing import provision + + provision.create_follower_db(node.slaveinput["follower_ident"]) + + def pytest_testnodedown(node, error): + from alembic.testing import provision + + provision.drop_follower_db(node.slaveinput["follower_ident"]) + + +def pytest_collection_modifyitems(session, config, 
items): + # look for all those classes that specify __backend__ and + # expand them out into per-database test cases. + + # this is much easier to do within pytest_pycollect_makeitem, however + # pytest is iterating through cls.__dict__ as makeitem is + # called which causes a "dictionary changed size" error on py3k. + # I'd submit a pullreq for them to turn it into a list first, but + # it's to suit the rather odd use case here which is that we are adding + # new classes to a module on the fly. + + rebuilt_items = collections.defaultdict(list) + items[:] = [ + item for item in items if isinstance(item.parent, pytest.Instance) + ] + test_classes = set(item.parent for item in items) + for test_class in test_classes: + for sub_cls in plugin_base.generate_sub_tests( + test_class.cls, test_class.parent.module + ): + if sub_cls is not test_class.cls: + list_ = rebuilt_items[test_class.cls] + + for inst in pytest.Class( + sub_cls.__name__, parent=test_class.parent.parent + ).collect(): + list_.extend(inst.collect()) + + newitems = [] + for item in items: + if item.parent.cls in rebuilt_items: + newitems.extend(rebuilt_items[item.parent.cls]) + rebuilt_items[item.parent.cls][:] = [] + else: + newitems.append(item) + + # seems like the functions attached to a test class aren't sorted already? + # is that true and why's that? (when using unittest, they're sorted) + items[:] = sorted( + newitems, + key=lambda item: ( + item.parent.parent.parent.name, + item.parent.parent.name, + item.name, + ), + ) + + +def pytest_pycollect_makeitem(collector, name, obj): + if inspect.isclass(obj) and plugin_base.want_class(obj): + return pytest.Class(name, parent=collector) + elif ( + inspect.isfunction(obj) + and isinstance(collector, pytest.Instance) + and plugin_base.want_method(collector.cls, obj) + ): + return pytest.Function(name, parent=collector) + else: + return [] + + +_current_class = None + + +def pytest_runtest_setup(item): + # here we seem to get called only based on what we collected + # in pytest_collection_modifyitems. So to do class-based stuff + # we have to tear that out. + global _current_class + + if not isinstance(item, pytest.Function): + return + + # ... so we're doing a little dance here to figure it out... + if _current_class is None: + class_setup(item.parent.parent) + _current_class = item.parent.parent + + # this is needed for the class-level, to ensure that the + # teardown runs after the class is completed with its own + # class-level teardown... 
+ def finalize(): + global _current_class + class_teardown(item.parent.parent) + _current_class = None + + item.parent.parent.addfinalizer(finalize) + + test_setup(item) + + +def pytest_runtest_teardown(item): + # ...but this works better as the hook here rather than + # using a finalizer, as the finalizer seems to get in the way + # of the test reporting failures correctly (you get a bunch of + # py.test assertion stuff instead) + test_teardown(item) + + +def test_setup(item): + plugin_base.before_test( + item, item.parent.module.__name__, item.parent.cls, item.name + ) + + +def test_teardown(item): + plugin_base.after_test(item) + + +def class_setup(item): + plugin_base.start_test_class(item.cls) + + +def class_teardown(item): + plugin_base.stop_test_class(item.cls) diff --git a/py3/lib/python3.6/site-packages/alembic/testing/provision.py b/py3/lib/python3.6/site-packages/alembic/testing/provision.py new file mode 100644 index 0000000..9687b75 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/provision.py @@ -0,0 +1,351 @@ +"""NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; + this should be removable when Alembic targets SQLAlchemy 1.0.0 +""" +import collections +import logging +import os +import time + +from sqlalchemy import create_engine +from sqlalchemy import exc +from sqlalchemy import text +from sqlalchemy.engine import url as sa_url + +from . import config +from . import engines +from .compat import get_url_backend_name +from ..util import compat + +log = logging.getLogger(__name__) + +FOLLOWER_IDENT = None + + +class register(object): + def __init__(self): + self.fns = {} + + @classmethod + def init(cls, fn): + return register().for_db("*")(fn) + + def for_db(self, dbname): + def decorate(fn): + self.fns[dbname] = fn + return self + + return decorate + + def __call__(self, cfg, *arg): + if isinstance(cfg, compat.string_types): + url = sa_url.make_url(cfg) + elif isinstance(cfg, sa_url.URL): + url = cfg + else: + url = cfg.db.url + backend = get_url_backend_name(url) + if backend in self.fns: + return self.fns[backend](cfg, *arg) + else: + return self.fns["*"](cfg, *arg) + + +def create_follower_db(follower_ident): + + for cfg in _configs_for_db_operation(): + _create_db(cfg, cfg.db, follower_ident) + + +def configure_follower(follower_ident): + for cfg in config.Config.all_configs(): + _configure_follower(cfg, follower_ident) + + +def setup_config(db_url, options, file_config, follower_ident): + if follower_ident: + db_url = _follower_url_from_main(db_url, follower_ident) + db_opts = {} + _update_db_opts(db_url, db_opts) + eng = engines.testing_engine(db_url, db_opts) + _post_configure_engine(db_url, eng, follower_ident) + eng.connect().close() + + cfg = config.Config.register(eng, db_opts, options, file_config) + if follower_ident: + _configure_follower(cfg, follower_ident) + return cfg + + +def drop_follower_db(follower_ident): + for cfg in _configs_for_db_operation(): + _drop_db(cfg, cfg.db, follower_ident) + + +def _configs_for_db_operation(): + hosts = set() + + for cfg in config.Config.all_configs(): + cfg.db.dispose() + + for cfg in config.Config.all_configs(): + url = cfg.db.url + backend = get_url_backend_name(url) + host_conf = (backend, url.username, url.host, url.database) + + if host_conf not in hosts: + yield cfg + hosts.add(host_conf) + + for cfg in config.Config.all_configs(): + cfg.db.dispose() + + +@register.init +def _create_db(cfg, eng, ident): + raise NotImplementedError("no DB creation routine for cfg: %s" % 
eng.url) + + +@register.init +def _drop_db(cfg, eng, ident): + raise NotImplementedError("no DB drop routine for cfg: %s" % eng.url) + + +@register.init +def _update_db_opts(db_url, db_opts): + pass + + +@register.init +def _configure_follower(cfg, ident): + pass + + +@register.init +def _post_configure_engine(url, engine, follower_ident): + pass + + +@register.init +def _follower_url_from_main(url, ident): + url = sa_url.make_url(url) + url.database = ident + return url + + +@_update_db_opts.for_db("mssql") +def _mssql_update_db_opts(db_url, db_opts): + db_opts["legacy_schema_aliasing"] = False + + +@_follower_url_from_main.for_db("sqlite") +def _sqlite_follower_url_from_main(url, ident): + url = sa_url.make_url(url) + if not url.database or url.database == ":memory:": + return url + else: + return sa_url.make_url("sqlite:///%s.db" % ident) + + +@_post_configure_engine.for_db("sqlite") +def _sqlite_post_configure_engine(url, engine, follower_ident): + from sqlalchemy import event + + @event.listens_for(engine, "connect") + def connect(dbapi_connection, connection_record): + # use file DBs in all cases, memory acts kind of strangely + # as an attached + if not follower_ident: + dbapi_connection.execute( + 'ATTACH DATABASE "test_schema.db" AS test_schema' + ) + else: + dbapi_connection.execute( + 'ATTACH DATABASE "%s_test_schema.db" AS test_schema' + % follower_ident + ) + + +@_create_db.for_db("postgresql") +def _pg_create_db(cfg, eng, ident): + template_db = cfg.options.postgresql_templatedb + + with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn: + try: + _pg_drop_db(cfg, conn, ident) + except Exception: + pass + if not template_db: + template_db = conn.scalar("select current_database()") + + attempt = 0 + while True: + try: + conn.execute( + "CREATE DATABASE %s TEMPLATE %s" % (ident, template_db) + ) + except exc.OperationalError as err: + attempt += 1 + if attempt >= 3: + raise + if "accessed by other users" in str(err): + log.info( + "Waiting to create %s, URI %r, " + "template DB %s is in use sleeping for .5", + ident, + eng.url, + template_db, + ) + time.sleep(0.5) + else: + break + + +@_create_db.for_db("mysql") +def _mysql_create_db(cfg, eng, ident): + with eng.connect() as conn: + try: + _mysql_drop_db(cfg, conn, ident) + except Exception: + pass + conn.execute("CREATE DATABASE %s" % ident) + conn.execute("CREATE DATABASE %s_test_schema" % ident) + conn.execute("CREATE DATABASE %s_test_schema_2" % ident) + + +@_configure_follower.for_db("mysql") +def _mysql_configure_follower(config, ident): + config.test_schema = "%s_test_schema" % ident + config.test_schema_2 = "%s_test_schema_2" % ident + + +@_create_db.for_db("sqlite") +def _sqlite_create_db(cfg, eng, ident): + pass + + +@_drop_db.for_db("postgresql") +def _pg_drop_db(cfg, eng, ident): + with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn: + conn.execute( + text( + "select pg_terminate_backend(pid) from pg_stat_activity " + "where usename=current_user and pid != pg_backend_pid() " + "and datname=:dname" + ), + dname=ident, + ) + conn.execute("DROP DATABASE %s" % ident) + + +@_drop_db.for_db("sqlite") +def _sqlite_drop_db(cfg, eng, ident): + if ident: + os.remove("%s_test_schema.db" % ident) + else: + os.remove("%s.db" % ident) + + +@_drop_db.for_db("mysql") +def _mysql_drop_db(cfg, eng, ident): + with eng.connect() as conn: + conn.execute("DROP DATABASE %s_test_schema" % ident) + conn.execute("DROP DATABASE %s_test_schema_2" % ident) + conn.execute("DROP DATABASE %s" % ident) + + 
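
Each provisioning hook above is a register() instance seeded by @register.init, so additional backends can attach their own implementations with .for_db() exactly as the postgresql/mysql/sqlite variants do. A sketch for a hypothetical extra dialect (the name and SQL are purely illustrative):

    from alembic.testing import provision

    @provision._create_db.for_db("cockroachdb")
    def _cockroach_create_db(cfg, eng, ident):
        with eng.connect() as conn:
            conn.execute("CREATE DATABASE %s" % ident)

    @provision._drop_db.for_db("cockroachdb")
    def _cockroach_drop_db(cfg, eng, ident):
        with eng.connect() as conn:
            conn.execute("DROP DATABASE %s" % ident)
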
+@_create_db.for_db("oracle") +def _oracle_create_db(cfg, eng, ident): + # NOTE: make sure you've run "ALTER DATABASE default tablespace users" or + # similar, so that the default tablespace is not "system"; reflection will + # fail otherwise + with eng.connect() as conn: + conn.execute("create user %s identified by xe" % ident) + conn.execute("create user %s_ts1 identified by xe" % ident) + conn.execute("create user %s_ts2 identified by xe" % ident) + conn.execute("grant dba to %s" % (ident,)) + conn.execute("grant unlimited tablespace to %s" % ident) + conn.execute("grant unlimited tablespace to %s_ts1" % ident) + conn.execute("grant unlimited tablespace to %s_ts2" % ident) + + +@_configure_follower.for_db("oracle") +def _oracle_configure_follower(config, ident): + config.test_schema = "%s_ts1" % ident + config.test_schema_2 = "%s_ts2" % ident + + +def _ora_drop_ignore(conn, dbname): + try: + conn.execute("drop user %s cascade" % dbname) + log.info("Reaped db: %s" % dbname) + return True + except exc.DatabaseError as err: + log.warning("couldn't drop db: %s" % err) + return False + + +@_drop_db.for_db("oracle") +def _oracle_drop_db(cfg, eng, ident): + with eng.connect() as conn: + # cx_Oracle seems to occasionally leak open connections when a large + # suite it run, even if we confirm we have zero references to + # connection objects. + # while there is a "kill session" command in Oracle, + # it unfortunately does not release the connection sufficiently. + _ora_drop_ignore(conn, ident) + _ora_drop_ignore(conn, "%s_ts1" % ident) + _ora_drop_ignore(conn, "%s_ts2" % ident) + + +def reap_oracle_dbs(idents_file): + log.info("Reaping Oracle dbs...") + + urls = collections.defaultdict(list) + with open(idents_file) as file_: + for line in file_: + line = line.strip() + db_name, db_url = line.split(" ") + urls[db_url].append(db_name) + + for url in urls: + if not url.startswith("oracle"): + continue + idents = urls[url] + log.info("db reaper connecting to %r", url) + eng = create_engine(url) + with eng.connect() as conn: + + log.info("identifiers in file: %s", ", ".join(idents)) + + to_reap = conn.execute( + "select u.username from all_users u where username " + "like 'TEST_%' and not exists (select username " + "from v$session where username=u.username)" + ) + all_names = set(username.lower() for (username,) in to_reap) + to_drop = set() + for name in all_names: + if name.endswith("_ts1") or name.endswith("_ts2"): + continue + elif name in idents: + to_drop.add(name) + if "%s_ts1" % name in all_names: + to_drop.add("%s_ts1" % name) + if "%s_ts2" % name in all_names: + to_drop.add("%s_ts2" % name) + + dropped = total = 0 + for total, username in enumerate(to_drop, 1): + if _ora_drop_ignore(conn, username): + dropped += 1 + log.info( + "Dropped %d out of %d stale databases detected", dropped, total + ) + + +@_follower_url_from_main.for_db("oracle") +def _oracle_follower_url_from_main(url, ident): + url = sa_url.make_url(url) + url.username = ident + url.password = "xe" + return url diff --git a/py3/lib/python3.6/site-packages/alembic/testing/requirements.py b/py3/lib/python3.6/site-packages/alembic/testing/requirements.py new file mode 100644 index 0000000..cf570a5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/requirements.py @@ -0,0 +1,196 @@ +import sys + +from alembic import util +from alembic.util import sqla_compat +from . 
import exclusions + +if util.sqla_094: + from sqlalchemy.testing.requirements import Requirements +else: + + class Requirements(object): + pass + + +class SuiteRequirements(Requirements): + @property + def schemas(self): + """Target database must support external schemas, and have one + named 'test_schema'.""" + + return exclusions.open() + + @property + def unique_constraint_reflection(self): + def doesnt_have_check_uq_constraints(config): + from sqlalchemy import inspect + + # temporary + if config.db.name == "oracle": + return True + + insp = inspect(config.db) + try: + insp.get_unique_constraints("x") + except NotImplementedError: + return True + except TypeError: + return True + except Exception: + pass + return False + + return exclusions.skip_if(doesnt_have_check_uq_constraints) + + @property + def foreign_key_match(self): + return exclusions.open() + + @property + def check_constraints_w_enforcement(self): + """Target database must support check constraints + and also enforce them.""" + + return exclusions.open() + + @property + def reflects_pk_names(self): + return exclusions.closed() + + @property + def reflects_fk_options(self): + return exclusions.closed() + + @property + def sqlalchemy_12(self): + return exclusions.skip_if( + lambda config: not util.sqla_1216, + "SQLAlchemy 1.2.16 or greater required", + ) + + @property + def sqlalchemy_10(self): + return exclusions.skip_if( + lambda config: not util.sqla_100, + "SQLAlchemy 1.0.0 or greater required", + ) + + @property + def fail_before_sqla_100(self): + return exclusions.fails_if( + lambda config: not util.sqla_100, + "SQLAlchemy 1.0.0 or greater required", + ) + + @property + def fail_before_sqla_1010(self): + return exclusions.fails_if( + lambda config: not util.sqla_1010, + "SQLAlchemy 1.0.10 or greater required", + ) + + @property + def fail_before_sqla_099(self): + return exclusions.fails_if( + lambda config: not util.sqla_099, + "SQLAlchemy 0.9.9 or greater required", + ) + + @property + def fail_before_sqla_110(self): + return exclusions.fails_if( + lambda config: not util.sqla_110, + "SQLAlchemy 1.1.0 or greater required", + ) + + @property + def sqlalchemy_092(self): + return exclusions.skip_if( + lambda config: not util.sqla_092, + "SQLAlchemy 0.9.2 or greater required", + ) + + @property + def sqlalchemy_094(self): + return exclusions.skip_if( + lambda config: not util.sqla_094, + "SQLAlchemy 0.9.4 or greater required", + ) + + @property + def sqlalchemy_099(self): + return exclusions.skip_if( + lambda config: not util.sqla_099, + "SQLAlchemy 0.9.9 or greater required", + ) + + @property + def sqlalchemy_100(self): + return exclusions.skip_if( + lambda config: not util.sqla_100, + "SQLAlchemy 1.0.0 or greater required", + ) + + @property + def sqlalchemy_1014(self): + return exclusions.skip_if( + lambda config: not util.sqla_1014, + "SQLAlchemy 1.0.14 or greater required", + ) + + @property + def sqlalchemy_1115(self): + return exclusions.skip_if( + lambda config: not util.sqla_1115, + "SQLAlchemy 1.1.15 or greater required", + ) + + @property + def sqlalchemy_110(self): + return exclusions.skip_if( + lambda config: not util.sqla_110, + "SQLAlchemy 1.1.0 or greater required", + ) + + @property + def sqlalchemy_issue_4436(self): + def check(config): + vers = sqla_compat._vers + + if vers == (1, 3, 0, "b1"): + return True + elif vers >= (1, 2, 16): + return False + else: + return True + + return exclusions.skip_if( + check, "SQLAlchemy 1.2.16, 1.3.0b2 or greater required" + ) + + @property + def python3(self): + 
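
A concrete suite points setup.cfg's requirement_cls at a subclass of SuiteRequirements and overrides individual properties; _setup_requirements() in plugin_base.py imports it from the "module:Class" string. The module path, class name, and dialect list below are illustrative:

    # e.g. referenced as: requirement_cls = tests.requirements:DefaultRequirements
    from alembic.testing.requirements import SuiteRequirements
    from alembic.testing import exclusions

    class DefaultRequirements(SuiteRequirements):
        @property
        def reflects_fk_options(self):
            # enable FK-option reflection tests only where supported
            return exclusions.only_if(
                lambda config: config.db.name in ("postgresql", "mysql")
            )
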
return exclusions.skip_if( + lambda: sys.version_info < (3,), "Python version 3.xx is required." + ) + + @property + def pep3147(self): + + return exclusions.only_if(lambda config: util.compat.has_pep3147()) + + @property + def comments(self): + return exclusions.only_if( + lambda config: sqla_compat._dialect_supports_comments( + config.db.dialect + ) + ) + + @property + def comments_api(self): + return exclusions.only_if(lambda config: util.sqla_120) + + @property + def alter_column(self): + return exclusions.open() diff --git a/py3/lib/python3.6/site-packages/alembic/testing/runner.py b/py3/lib/python3.6/site-packages/alembic/testing/runner.py new file mode 100644 index 0000000..da5e0f4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/runner.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python +# testing/runner.py +# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +""" +Nose test runner module. + +This script is a front-end to "nosetests" which +installs SQLAlchemy's testing plugin into the local environment. + +The script is intended to be used by third-party dialects and extensions +that run within SQLAlchemy's testing framework. The runner can +be invoked via:: + + python -m alembic.testing.runner + +The script is then essentially the same as the "nosetests" script, including +all of the usual Nose options. The test environment requires that a +setup.cfg is locally present including various required options. + +Note that when using this runner, Nose's "coverage" plugin will not be +able to provide coverage for SQLAlchemy itself, since SQLAlchemy is +imported into sys.modules before coverage is started. The special +script sqla_nose.py is provided as a top-level script which loads the +plugin in a special (somewhat hacky) way so that coverage against +SQLAlchemy itself is possible. + +""" +import nose + +from .plugin.noseplugin import NoseSQLAlchemy + + +def main(): + nose.main(addplugins=[NoseSQLAlchemy()]) + + +def setup_py_test(): + """Runner to use for the 'test_suite' entry of your setup.py. + + Prevents any name clash shenanigans from the command line + argument "test" that the "setup.py test" command sends + to nose. + + """ + nose.main(addplugins=[NoseSQLAlchemy()], argv=["runner"]) diff --git a/py3/lib/python3.6/site-packages/alembic/testing/util.py b/py3/lib/python3.6/site-packages/alembic/testing/util.py new file mode 100644 index 0000000..b2b3476 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/util.py @@ -0,0 +1,19 @@ +from sqlalchemy.util import decorator + + +@decorator +def provide_metadata(fn, *args, **kw): + """Provide bound MetaData for a single test, dropping afterwards.""" + + from . 
import config + from sqlalchemy import schema + + metadata = schema.MetaData(config.db) + self = args[0] + prev_meta = getattr(self, "metadata", None) + self.metadata = metadata + try: + return fn(*args, **kw) + finally: + metadata.drop_all() + self.metadata = prev_meta diff --git a/py3/lib/python3.6/site-packages/alembic/testing/warnings.py b/py3/lib/python3.6/site-packages/alembic/testing/warnings.py new file mode 100644 index 0000000..27ba706 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/testing/warnings.py @@ -0,0 +1,45 @@ +# testing/warnings.py +# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +"""NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; + this should be removable when Alembic targets SQLAlchemy 0.9.4. +""" + +from __future__ import absolute_import + +import re +import warnings + +from sqlalchemy import exc as sa_exc + + +def setup_filters(): + """Set global warning behavior for the test suite.""" + warnings.filterwarnings( + "ignore", category=sa_exc.SAPendingDeprecationWarning + ) + warnings.filterwarnings("error", category=sa_exc.SADeprecationWarning) + warnings.filterwarnings("error", category=sa_exc.SAWarning) + warnings.filterwarnings("error", category=DeprecationWarning) + + +def assert_warnings(fn, warning_msgs, regex=False): + """Assert that each of the given warnings are emitted by fn.""" + + from .assertions import eq_ + + with warnings.catch_warnings(record=True) as log: + # ensure that nothing is going into __warningregistry__ + warnings.filterwarnings("always") + + result = fn() + for warning in log: + popwarn = warning_msgs.pop(0) + if regex: + assert re.match(popwarn, str(warning.message)) + else: + eq_(popwarn, str(warning.message)) + return result diff --git a/py3/lib/python3.6/site-packages/alembic/util/__init__.py b/py3/lib/python3.6/site-packages/alembic/util/__init__.py new file mode 100644 index 0000000..88b7431 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/util/__init__.py @@ -0,0 +1,39 @@ +from .exc import CommandError +from .langhelpers import _with_legacy_names # noqa +from .langhelpers import asbool # noqa +from .langhelpers import dedupe_tuple # noqa +from .langhelpers import Dispatcher # noqa +from .langhelpers import immutabledict # noqa +from .langhelpers import memoized_property # noqa +from .langhelpers import ModuleClsProxy # noqa +from .langhelpers import rev_id # noqa +from .langhelpers import to_list # noqa +from .langhelpers import to_tuple # noqa +from .messaging import err # noqa +from .messaging import format_as_comma # noqa +from .messaging import msg # noqa +from .messaging import obfuscate_url_pw # noqa +from .messaging import status # noqa +from .messaging import warn # noqa +from .messaging import write_outstream # noqa +from .pyfiles import coerce_resource_to_filename # noqa +from .pyfiles import edit # noqa +from .pyfiles import load_python_file # noqa +from .pyfiles import pyc_file_from_path # noqa +from .pyfiles import template_to_file # noqa +from .sqla_compat import sqla_09 # noqa +from .sqla_compat import sqla_092 # noqa +from .sqla_compat import sqla_094 # noqa +from .sqla_compat import sqla_099 # noqa +from .sqla_compat import sqla_100 # noqa +from .sqla_compat import sqla_1010 # noqa +from .sqla_compat import sqla_1014 # noqa +from .sqla_compat import sqla_105 # noqa +from .sqla_compat import sqla_110 # noqa +from 
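
assert_warnings() above runs a callable, captures the warnings it emits, and checks them in order against the given messages (or regexes when regex=True). A short, self-contained usage sketch:

    import warnings
    from alembic.testing.warnings import assert_warnings

    def emits():
        warnings.warn("frobnicate() is deprecated")
        return 42

    # messages are matched positionally against the emitted warnings
    result = assert_warnings(emits, ["frobnicate() is deprecated"])
    assert result == 42
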
.sqla_compat import sqla_1115 # noqa +from .sqla_compat import sqla_120 # noqa +from .sqla_compat import sqla_1216 # noqa + + +if not sqla_09: + raise CommandError("SQLAlchemy 0.9.0 or greater is required. ") diff --git a/py3/lib/python3.6/site-packages/alembic/util/compat.py b/py3/lib/python3.6/site-packages/alembic/util/compat.py new file mode 100644 index 0000000..d57df9e --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/util/compat.py @@ -0,0 +1,358 @@ +import collections +import inspect +import io +import sys + +if sys.version_info < (2, 7): + raise NotImplementedError("Python 2.7 or greater is required.") + +py27 = sys.version_info >= (2, 7) +py2k = sys.version_info.major < 3 +py3k = sys.version_info.major >= 3 +py33 = sys.version_info >= (3, 3) +py35 = sys.version_info >= (3, 5) +py36 = sys.version_info >= (3, 6) + + +ArgSpec = collections.namedtuple( + "ArgSpec", ["args", "varargs", "keywords", "defaults"] +) + + +def inspect_getargspec(func): + """getargspec based on fully vendored getfullargspec from Python 3.3.""" + + if inspect.ismethod(func): + func = func.__func__ + if not inspect.isfunction(func): + raise TypeError("{!r} is not a Python function".format(func)) + + co = func.__code__ + if not inspect.iscode(co): + raise TypeError("{!r} is not a code object".format(co)) + + nargs = co.co_argcount + names = co.co_varnames + nkwargs = co.co_kwonlyargcount if py3k else 0 + args = list(names[:nargs]) + + nargs += nkwargs + varargs = None + if co.co_flags & inspect.CO_VARARGS: + varargs = co.co_varnames[nargs] + nargs = nargs + 1 + varkw = None + if co.co_flags & inspect.CO_VARKEYWORDS: + varkw = co.co_varnames[nargs] + + return ArgSpec(args, varargs, varkw, func.__defaults__) + + +if py3k: + from io import StringIO +else: + # accepts strings + from StringIO import StringIO # noqa + +if py3k: + import builtins as compat_builtins + + string_types = (str,) + binary_type = bytes + text_type = str + + def callable(fn): # noqa + return hasattr(fn, "__call__") + + def u(s): + return s + + def ue(s): + return s + + range = range # noqa +else: + import __builtin__ as compat_builtins + + string_types = (basestring,) # noqa + binary_type = str + text_type = unicode # noqa + callable = callable # noqa + + def u(s): + return unicode(s, "utf-8") # noqa + + def ue(s): + return unicode(s, "unicode_escape") # noqa + + range = xrange # noqa + +if py33: + import collections.abc as collections_abc +else: + import collections as collections_abc # noqa + +if py35: + from inspect import formatannotation + + def inspect_formatargspec( + args, + varargs=None, + varkw=None, + defaults=None, + kwonlyargs=(), + kwonlydefaults={}, + annotations={}, + formatarg=str, + formatvarargs=lambda name: "*" + name, + formatvarkw=lambda name: "**" + name, + formatvalue=lambda value: "=" + repr(value), + formatreturns=lambda text: " -> " + text, + formatannotation=formatannotation, + ): + """Copy formatargspec from python 3.7 standard library. + + Python 3 has deprecated formatargspec and requested that Signature + be used instead, however this requires a full reimplementation + of formatargspec() in terms of creating Parameter objects and such. + Instead of introducing all the object-creation overhead and having + to reinvent from scratch, just copy their compatibility routine. 
+ + """ + + def formatargandannotation(arg): + result = formatarg(arg) + if arg in annotations: + result += ": " + formatannotation(annotations[arg]) + return result + + specs = [] + if defaults: + firstdefault = len(args) - len(defaults) + for i, arg in enumerate(args): + spec = formatargandannotation(arg) + if defaults and i >= firstdefault: + spec = spec + formatvalue(defaults[i - firstdefault]) + specs.append(spec) + if varargs is not None: + specs.append(formatvarargs(formatargandannotation(varargs))) + else: + if kwonlyargs: + specs.append("*") + if kwonlyargs: + for kwonlyarg in kwonlyargs: + spec = formatargandannotation(kwonlyarg) + if kwonlydefaults and kwonlyarg in kwonlydefaults: + spec += formatvalue(kwonlydefaults[kwonlyarg]) + specs.append(spec) + if varkw is not None: + specs.append(formatvarkw(formatargandannotation(varkw))) + result = "(" + ", ".join(specs) + ")" + if "return" in annotations: + result += formatreturns(formatannotation(annotations["return"])) + return result + + +else: + from inspect import formatargspec as inspect_formatargspec # noqa + + +if py3k: + from configparser import ConfigParser as SafeConfigParser + import configparser +else: + from ConfigParser import SafeConfigParser # noqa + import ConfigParser as configparser # noqa + +if py2k: + from mako.util import parse_encoding + +if py35: + import importlib.util + import importlib.machinery + + def load_module_py(module_id, path): + spec = importlib.util.spec_from_file_location(module_id, path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + def load_module_pyc(module_id, path): + spec = importlib.util.spec_from_file_location(module_id, path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +elif py3k: + import importlib.machinery + + def load_module_py(module_id, path): + module = importlib.machinery.SourceFileLoader( + module_id, path + ).load_module(module_id) + del sys.modules[module_id] + return module + + def load_module_pyc(module_id, path): + module = importlib.machinery.SourcelessFileLoader( + module_id, path + ).load_module(module_id) + del sys.modules[module_id] + return module + + +if py3k: + + def get_bytecode_suffixes(): + try: + return importlib.machinery.BYTECODE_SUFFIXES + except AttributeError: + return importlib.machinery.DEBUG_BYTECODE_SUFFIXES + + def get_current_bytecode_suffixes(): + if py35: + suffixes = importlib.machinery.BYTECODE_SUFFIXES + else: + if sys.flags.optimize: + suffixes = importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES + else: + suffixes = importlib.machinery.BYTECODE_SUFFIXES + + return suffixes + + def has_pep3147(): + # http://www.python.org/dev/peps/pep-3147/#detecting-pep-3147-availability + + import imp + + return hasattr(imp, "get_tag") + + +else: + import imp + + def load_module_py(module_id, path): # noqa + with open(path, "rb") as fp: + mod = imp.load_source(module_id, path, fp) + if py2k: + source_encoding = parse_encoding(fp) + if source_encoding: + mod._alembic_source_encoding = source_encoding + del sys.modules[module_id] + return mod + + def load_module_pyc(module_id, path): # noqa + with open(path, "rb") as fp: + mod = imp.load_compiled(module_id, path, fp) + # no source encoding here + del sys.modules[module_id] + return mod + + def get_current_bytecode_suffixes(): + if sys.flags.optimize: + return [".pyo"] # e.g. .pyo + else: + return [".pyc"] # e.g. 
.pyc + + def has_pep3147(): + return False + + +try: + exec_ = getattr(compat_builtins, "exec") +except AttributeError: + # Python 2 + def exec_(func_text, globals_, lcl): + exec("exec func_text in globals_, lcl") + + +################################################ +# cross-compatible metaclass implementation +# Copyright (c) 2010-2012 Benjamin Peterson + + +def with_metaclass(meta, base=object): + """Create a base class with a metaclass.""" + return meta("%sBase" % meta.__name__, (base,), {}) + + +################################################ + +if py3k: + + def reraise(tp, value, tb=None, cause=None): + if cause is not None: + value.__cause__ = cause + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + def raise_from_cause(exception, exc_info=None): + if exc_info is None: + exc_info = sys.exc_info() + exc_type, exc_value, exc_tb = exc_info + reraise(type(exception), exception, tb=exc_tb, cause=exc_value) + + +else: + exec( + "def reraise(tp, value, tb=None, cause=None):\n" + " raise tp, value, tb\n" + ) + + def raise_from_cause(exception, exc_info=None): + # not as nice as that of Py3K, but at least preserves + # the code line where the issue occurred + if exc_info is None: + exc_info = sys.exc_info() + exc_type, exc_value, exc_tb = exc_info + reraise(type(exception), exception, tb=exc_tb) + + +# produce a wrapper that allows encoded text to stream +# into a given buffer, but doesn't close it. +# not sure of a more idiomatic approach to this. +class EncodedIO(io.TextIOWrapper): + def close(self): + pass + + +if py2k: + # in Py2K, the io.* package is awkward because it does not + # easily wrap the file type (e.g. sys.stdout) and I can't + # figure out at all how to wrap StringIO.StringIO (used by nosetests) + # and also might be user specified too. So create a full + # adapter. + + class ActLikePy3kIO(object): + + """Produce an object capable of wrapping either + sys.stdout (e.g. file) *or* StringIO.StringIO(). 
+ + """ + + def _false(self): + return False + + def _true(self): + return True + + readable = seekable = _false + writable = _true + closed = False + + def __init__(self, file_): + self.file_ = file_ + + def write(self, text): + return self.file_.write(text) + + def flush(self): + return self.file_.flush() + + class EncodedIO(EncodedIO): + def __init__(self, file_, encoding): + super(EncodedIO, self).__init__( + ActLikePy3kIO(file_), encoding=encoding + ) diff --git a/py3/lib/python3.6/site-packages/alembic/util/exc.py b/py3/lib/python3.6/site-packages/alembic/util/exc.py new file mode 100644 index 0000000..f7ad021 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/util/exc.py @@ -0,0 +1,2 @@ +class CommandError(Exception): + pass diff --git a/py3/lib/python3.6/site-packages/alembic/util/langhelpers.py b/py3/lib/python3.6/site-packages/alembic/util/langhelpers.py new file mode 100644 index 0000000..bb9c8f5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/util/langhelpers.py @@ -0,0 +1,320 @@ +import collections +import textwrap +import uuid +import warnings + +from .compat import callable +from .compat import collections_abc +from .compat import exec_ +from .compat import inspect_getargspec +from .compat import string_types +from .compat import with_metaclass + + +class _ModuleClsMeta(type): + def __setattr__(cls, key, value): + super(_ModuleClsMeta, cls).__setattr__(key, value) + cls._update_module_proxies(key) + + +class ModuleClsProxy(with_metaclass(_ModuleClsMeta)): + """Create module level proxy functions for the + methods on a given class. + + The functions will have a compatible signature + as the methods. + + """ + + _setups = collections.defaultdict(lambda: (set(), [])) + + @classmethod + def _update_module_proxies(cls, name): + attr_names, modules = cls._setups[cls] + for globals_, locals_ in modules: + cls._add_proxied_attribute(name, globals_, locals_, attr_names) + + def _install_proxy(self): + attr_names, modules = self._setups[self.__class__] + for globals_, locals_ in modules: + globals_["_proxy"] = self + for attr_name in attr_names: + globals_[attr_name] = getattr(self, attr_name) + + def _remove_proxy(self): + attr_names, modules = self._setups[self.__class__] + for globals_, locals_ in modules: + globals_["_proxy"] = None + for attr_name in attr_names: + del globals_[attr_name] + + @classmethod + def create_module_class_proxy(cls, globals_, locals_): + attr_names, modules = cls._setups[cls] + modules.append((globals_, locals_)) + cls._setup_proxy(globals_, locals_, attr_names) + + @classmethod + def _setup_proxy(cls, globals_, locals_, attr_names): + for methname in dir(cls): + cls._add_proxied_attribute(methname, globals_, locals_, attr_names) + + @classmethod + def _add_proxied_attribute(cls, methname, globals_, locals_, attr_names): + if not methname.startswith("_"): + meth = getattr(cls, methname) + if callable(meth): + locals_[methname] = cls._create_method_proxy( + methname, globals_, locals_ + ) + else: + attr_names.add(methname) + + @classmethod + def _create_method_proxy(cls, name, globals_, locals_): + fn = getattr(cls, name) + + def _name_error(name): + raise NameError( + "Can't invoke function '%s', as the proxy object has " + "not yet been " + "established for the Alembic '%s' class. " + "Try placing this code inside a callable." 
+ % (name, cls.__name__) + ) + + globals_["_name_error"] = _name_error + + translations = getattr(fn, "_legacy_translations", []) + if translations: + spec = inspect_getargspec(fn) + if spec[0] and spec[0][0] == "self": + spec[0].pop(0) + + outer_args = inner_args = "*args, **kw" + translate_str = "args, kw = _translate(%r, %r, %r, args, kw)" % ( + fn.__name__, + tuple(spec), + translations, + ) + + def translate(fn_name, spec, translations, args, kw): + return_kw = {} + return_args = [] + + for oldname, newname in translations: + if oldname in kw: + warnings.warn( + "Argument %r is now named %r " + "for method %s()." % (oldname, newname, fn_name) + ) + return_kw[newname] = kw.pop(oldname) + return_kw.update(kw) + + args = list(args) + if spec[3]: + pos_only = spec[0][: -len(spec[3])] + else: + pos_only = spec[0] + for arg in pos_only: + if arg not in return_kw: + try: + return_args.append(args.pop(0)) + except IndexError: + raise TypeError( + "missing required positional argument: %s" + % arg + ) + return_args.extend(args) + + return return_args, return_kw + + globals_["_translate"] = translate + else: + outer_args = "*args, **kw" + inner_args = "*args, **kw" + translate_str = "" + + func_text = textwrap.dedent( + """\ + def %(name)s(%(args)s): + %(doc)r + %(translate)s + try: + p = _proxy + except NameError: + _name_error('%(name)s') + return _proxy.%(name)s(%(apply_kw)s) + e + """ + % { + "name": name, + "translate": translate_str, + "args": outer_args, + "apply_kw": inner_args, + "doc": fn.__doc__, + } + ) + lcl = {} + exec_(func_text, globals_, lcl) + return lcl[name] + + +def _with_legacy_names(translations): + def decorate(fn): + fn._legacy_translations = translations + return fn + + return decorate + + +def asbool(value): + return value is not None and value.lower() == "true" + + +def rev_id(): + return uuid.uuid4().hex[-12:] + + +def to_list(x, default=None): + if x is None: + return default + elif isinstance(x, string_types): + return [x] + elif isinstance(x, collections_abc.Iterable): + return list(x) + else: + return [x] + + +def to_tuple(x, default=None): + if x is None: + return default + elif isinstance(x, string_types): + return (x,) + elif isinstance(x, collections_abc.Iterable): + return tuple(x) + else: + return (x,) + + +def unique_list(seq, hashfunc=None): + seen = set() + seen_add = seen.add + if not hashfunc: + return [x for x in seq if x not in seen and not seen_add(x)] + else: + return [ + x + for x in seq + if hashfunc(x) not in seen and not seen_add(hashfunc(x)) + ] + + +def dedupe_tuple(tup): + return tuple(unique_list(tup)) + + +class memoized_property(object): + + """A read-only @property that is only evaluated once.""" + + def __init__(self, fget, doc=None): + self.fget = fget + self.__doc__ = doc or fget.__doc__ + self.__name__ = fget.__name__ + + def __get__(self, obj, cls): + if obj is None: + return self + obj.__dict__[self.__name__] = result = self.fget(obj) + return result + + +class immutabledict(dict): + def _immutable(self, *arg, **kw): + raise TypeError("%s object is immutable" % self.__class__.__name__) + + __delitem__ = ( + __setitem__ + ) = __setattr__ = clear = pop = popitem = setdefault = update = _immutable + + def __new__(cls, *args): + new = dict.__new__(cls) + dict.__init__(new, *args) + return new + + def __init__(self, *args): + pass + + def __reduce__(self): + return immutabledict, (dict(self),) + + def union(self, d): + if not self: + return immutabledict(d) + else: + d2 = immutabledict(self) + dict.update(d2, d) + return d2 + + def 
__repr__(self): + return "immutabledict(%s)" % dict.__repr__(self) + + +class Dispatcher(object): + def __init__(self, uselist=False): + self._registry = {} + self.uselist = uselist + + def dispatch_for(self, target, qualifier="default"): + def decorate(fn): + if self.uselist: + self._registry.setdefault((target, qualifier), []).append(fn) + else: + assert (target, qualifier) not in self._registry + self._registry[(target, qualifier)] = fn + return fn + + return decorate + + def dispatch(self, obj, qualifier="default"): + + if isinstance(obj, string_types): + targets = [obj] + elif isinstance(obj, type): + targets = obj.__mro__ + else: + targets = type(obj).__mro__ + + for spcls in targets: + if qualifier != "default" and (spcls, qualifier) in self._registry: + return self._fn_or_list(self._registry[(spcls, qualifier)]) + elif (spcls, "default") in self._registry: + return self._fn_or_list(self._registry[(spcls, "default")]) + else: + raise ValueError("no dispatch function for object: %s" % obj) + + def _fn_or_list(self, fn_or_list): + if self.uselist: + + def go(*arg, **kw): + for fn in fn_or_list: + fn(*arg, **kw) + + return go + else: + return fn_or_list + + def branch(self): + """Return a copy of this dispatcher that is independently + writable.""" + + d = Dispatcher() + if self.uselist: + d._registry.update( + (k, [fn for fn in self._registry[k]]) for k in self._registry + ) + else: + d._registry.update(self._registry) + return d diff --git a/py3/lib/python3.6/site-packages/alembic/util/messaging.py b/py3/lib/python3.6/site-packages/alembic/util/messaging.py new file mode 100644 index 0000000..6a30133 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/util/messaging.py @@ -0,0 +1,98 @@ +import logging +import sys +import textwrap +import warnings + +from sqlalchemy.engine import url + +from .compat import binary_type +from .compat import collections_abc +from .compat import py27 +from .compat import string_types + +log = logging.getLogger(__name__) + +if py27: + # disable "no handler found" errors + logging.getLogger("alembic").addHandler(logging.NullHandler()) + + +try: + import fcntl + import termios + import struct + + ioctl = fcntl.ioctl(0, termios.TIOCGWINSZ, struct.pack("HHHH", 0, 0, 0, 0)) + _h, TERMWIDTH, _hp, _wp = struct.unpack("HHHH", ioctl) + if TERMWIDTH <= 0: # can occur if running in emacs pseudo-tty + TERMWIDTH = None +except (ImportError, IOError): + TERMWIDTH = None + + +def write_outstream(stream, *text): + encoding = getattr(stream, "encoding", "ascii") or "ascii" + for t in text: + if not isinstance(t, binary_type): + t = t.encode(encoding, "replace") + t = t.decode(encoding) + try: + stream.write(t) + except IOError: + # suppress "broken pipe" errors. + # no known way to handle this on Python 3 however + # as the exception is "ignored" (noisily) in TextIOWrapper. 
+ break + + +def status(_statmsg, fn, *arg, **kw): + msg(_statmsg + " ...", False) + try: + ret = fn(*arg, **kw) + write_outstream(sys.stdout, " done\n") + return ret + except: + write_outstream(sys.stdout, " FAILED\n") + raise + + +def err(message): + log.error(message) + msg("FAILED: %s" % message) + sys.exit(-1) + + +def obfuscate_url_pw(u): + u = url.make_url(u) + if u.password: + u.password = "XXXXX" + return str(u) + + +def warn(msg, stacklevel=2): + warnings.warn(msg, stacklevel=stacklevel) + + +def msg(msg, newline=True): + if TERMWIDTH is None: + write_outstream(sys.stdout, msg) + if newline: + write_outstream(sys.stdout, "\n") + else: + # left indent output lines + lines = textwrap.wrap(msg, TERMWIDTH) + if len(lines) > 1: + for line in lines[0:-1]: + write_outstream(sys.stdout, " ", line, "\n") + write_outstream(sys.stdout, " ", lines[-1], ("\n" if newline else "")) + + +def format_as_comma(value): + if value is None: + return "" + elif isinstance(value, string_types): + return value + elif isinstance(value, collections_abc.Iterable): + return ", ".join(value) + else: + raise ValueError("Don't know how to comma-format %r" % value) diff --git a/py3/lib/python3.6/site-packages/alembic/util/pyfiles.py b/py3/lib/python3.6/site-packages/alembic/util/pyfiles.py new file mode 100644 index 0000000..6e9b542 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/util/pyfiles.py @@ -0,0 +1,99 @@ +import os +import re +import tempfile + +from mako import exceptions +from mako.template import Template + +from .compat import get_current_bytecode_suffixes +from .compat import has_pep3147 +from .compat import load_module_py +from .compat import load_module_pyc +from .exc import CommandError + + +def template_to_file(template_file, dest, output_encoding, **kw): + template = Template(filename=template_file) + try: + output = template.render_unicode(**kw).encode(output_encoding) + except: + with tempfile.NamedTemporaryFile(suffix=".txt", delete=False) as ntf: + ntf.write( + exceptions.text_error_template() + .render_unicode() + .encode(output_encoding) + ) + fname = ntf.name + raise CommandError( + "Template rendering failed; see %s for a " + "template-oriented traceback." % fname + ) + else: + with open(dest, "wb") as f: + f.write(output) + + +def coerce_resource_to_filename(fname): + """Interpret a filename as either a filesystem location or as a package + resource. + + Names that are non absolute paths and contain a colon + are interpreted as resources and coerced to a file location. + + """ + if not os.path.isabs(fname) and ":" in fname: + import pkg_resources + + fname = pkg_resources.resource_filename(*fname.split(":")) + return fname + + +def pyc_file_from_path(path): + """Given a python source path, locate the .pyc. 
+ + """ + + if has_pep3147(): + import imp + + candidate = imp.cache_from_source(path) + if os.path.exists(candidate): + return candidate + + filepath, ext = os.path.splitext(path) + for ext in get_current_bytecode_suffixes(): + if os.path.exists(filepath + ext): + return filepath + ext + else: + return None + + +def edit(path): + """Given a source path, run the EDITOR for it""" + + import editor + + try: + editor.edit(path) + except Exception as exc: + raise CommandError("Error executing editor (%s)" % (exc,)) + + +def load_python_file(dir_, filename): + """Load a file from the given path as a Python module.""" + + module_id = re.sub(r"\W", "_", filename) + path = os.path.join(dir_, filename) + _, ext = os.path.splitext(filename) + if ext == ".py": + if os.path.exists(path): + module = load_module_py(module_id, path) + else: + pyc_path = pyc_file_from_path(path) + if pyc_path is None: + raise ImportError("Can't find Python file %s" % path) + else: + module = load_module_pyc(module_id, pyc_path) + elif ext in (".pyc", ".pyo"): + module = load_module_pyc(module_id, path) + return module diff --git a/py3/lib/python3.6/site-packages/alembic/util/sqla_compat.py b/py3/lib/python3.6/site-packages/alembic/util/sqla_compat.py new file mode 100644 index 0000000..52d4e01 --- /dev/null +++ b/py3/lib/python3.6/site-packages/alembic/util/sqla_compat.py @@ -0,0 +1,246 @@ +import re + +from sqlalchemy import __version__ +from sqlalchemy import schema +from sqlalchemy import sql +from sqlalchemy import types as sqltypes +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.schema import CheckConstraint +from sqlalchemy.schema import Column +from sqlalchemy.schema import ForeignKeyConstraint +from sqlalchemy.sql.expression import _BindParamClause +from sqlalchemy.sql.expression import _TextClause as TextClause +from sqlalchemy.sql.visitors import traverse + +from . 
import compat + + +def _safe_int(value): + try: + return int(value) + except: + return value + + +_vers = tuple( + [_safe_int(x) for x in re.findall(r"(\d+|[abc]\d)", __version__)] +) +sqla_09 = _vers >= (0, 9, 0) +sqla_092 = _vers >= (0, 9, 2) +sqla_094 = _vers >= (0, 9, 4) +sqla_094 = _vers >= (0, 9, 4) +sqla_099 = _vers >= (0, 9, 9) +sqla_100 = _vers >= (1, 0, 0) +sqla_105 = _vers >= (1, 0, 5) +sqla_1010 = _vers >= (1, 0, 10) +sqla_110 = _vers >= (1, 1, 0) +sqla_1014 = _vers >= (1, 0, 14) +sqla_1115 = _vers >= (1, 1, 15) +sqla_120 = _vers >= (1, 2, 0) +sqla_1216 = _vers >= (1, 2, 16) + + +if sqla_110: + AUTOINCREMENT_DEFAULT = "auto" +else: + AUTOINCREMENT_DEFAULT = True + + +def _table_for_constraint(constraint): + if isinstance(constraint, ForeignKeyConstraint): + return constraint.parent + else: + return constraint.table + + +def _columns_for_constraint(constraint): + if isinstance(constraint, ForeignKeyConstraint): + return [fk.parent for fk in constraint.elements] + elif isinstance(constraint, CheckConstraint): + return _find_columns(constraint.sqltext) + else: + return list(constraint.columns) + + +def _fk_spec(constraint): + if sqla_100: + source_columns = [ + constraint.columns[key].name for key in constraint.column_keys + ] + else: + source_columns = [ + element.parent.name for element in constraint.elements + ] + + source_table = constraint.parent.name + source_schema = constraint.parent.schema + target_schema = constraint.elements[0].column.table.schema + target_table = constraint.elements[0].column.table.name + target_columns = [element.column.name for element in constraint.elements] + ondelete = constraint.ondelete + onupdate = constraint.onupdate + deferrable = constraint.deferrable + initially = constraint.initially + return ( + source_schema, + source_table, + source_columns, + target_schema, + target_table, + target_columns, + onupdate, + ondelete, + deferrable, + initially, + ) + + +def _fk_is_self_referential(constraint): + spec = constraint.elements[0]._get_colspec() + tokens = spec.split(".") + tokens.pop(-1) # colname + tablekey = ".".join(tokens) + return tablekey == constraint.parent.key + + +def _is_type_bound(constraint): + # this deals with SQLAlchemy #3260, don't copy CHECK constraints + # that will be generated by the type. 
+ if sqla_100: + # new feature added for #3260 + return constraint._type_bound + else: + # old way, look at what we know Boolean/Enum to use + return constraint._create_rule is not None and isinstance( + getattr(constraint._create_rule, "target", None), + sqltypes.SchemaType, + ) + + +def _find_columns(clause): + """locate Column objects within the given expression.""" + + cols = set() + traverse(clause, {}, {"column": cols.add}) + return cols + + +def _remove_column_from_collection(collection, column): + """remove a column from a ColumnCollection.""" + + # workaround for older SQLAlchemy, remove the + # same object that's present + to_remove = collection[column.key] + collection.remove(to_remove) + + +def _textual_index_column(table, text_): + """a workaround for the Index construct's severe lack of flexibility""" + if isinstance(text_, compat.string_types): + c = Column(text_, sqltypes.NULLTYPE) + table.append_column(c) + return c + elif isinstance(text_, TextClause): + return _textual_index_element(table, text_) + else: + raise ValueError("String or text() construct expected") + + +class _textual_index_element(sql.ColumnElement): + """Wrap around a sqlalchemy text() construct in such a way that + we appear like a column-oriented SQL expression to an Index + construct. + + The issue here is that currently the Postgresql dialect, the biggest + recipient of functional indexes, keys all the index expressions to + the corresponding column expressions when rendering CREATE INDEX, + so the Index we create here needs to have a .columns collection that + is the same length as the .expressions collection. Ultimately + SQLAlchemy should support text() expressions in indexes. + + See SQLAlchemy issue 3174. + + """ + + __visit_name__ = "_textual_idx_element" + + def __init__(self, table, text): + self.table = table + self.text = text + self.key = text.text + self.fake_column = schema.Column(self.text.text, sqltypes.NULLTYPE) + table.append_column(self.fake_column) + + def get_children(self): + return [self.fake_column] + + +@compiles(_textual_index_element) +def _render_textual_index_column(element, compiler, **kw): + return compiler.process(element.text, **kw) + + +class _literal_bindparam(_BindParamClause): + pass + + +@compiles(_literal_bindparam) +def _render_literal_bindparam(element, compiler, **kw): + return compiler.render_literal_bindparam(element, **kw) + + +def _get_index_expressions(idx): + return list(idx.expressions) + + +def _get_index_column_names(idx): + return [getattr(exp, "name", None) for exp in _get_index_expressions(idx)] + + +def _get_index_final_name(dialect, idx): + # trying to keep the truncation rules totally localized on the + # SQLA side while also stepping around the quoting issue. Ideally + # the _prepared_index_name() method on the SQLA side would have + # a quoting option or the truncation routine would be broken out. + # + # test for SQLA quoted_name construct, introduced in + # 0.9 or thereabouts. + # this doesn't work in 0.8 and the "quote" option on Index doesn't + # seem to work in 0.8 either. 
+ if hasattr(idx.name, "quote"): + # might be quoted_name, might be truncated_name, keep it the + # same + quoted_name_cls = type(idx.name) + new_name = quoted_name_cls(str(idx.name), quote=False) + idx = schema.Index(name=new_name) + return dialect.ddl_compiler(dialect, None)._prepared_index_name(idx) + + +def _dialect_supports_comments(dialect): + if sqla_120: + return dialect.supports_comments + else: + return False + + +def _comment_attribute(obj): + """return the .comment attribute from a Table or Column""" + + if sqla_120: + return obj.comment + else: + return None + + +def _is_mariadb(mysql_dialect): + return ( + mysql_dialect.server_version_info + and "MariaDB" in mysql_dialect.server_version_info + ) + + +def _mariadb_normalized_version_info(mysql_dialect): + if len(mysql_dialect.server_version_info) > 5: + return mysql_dialect.server_version_info[3:] + else: + return mysql_dialect.server_version_info diff --git a/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/LICENSE b/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/LICENSE new file mode 100644 index 0000000..727ded8 --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2013 Chris Smith + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
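The sqla_compat helpers above gate alembic behavior on the installed SQLAlchemy release by splitting __version__ into a tuple that compares numerically. A minimal standalone sketch of that pattern follows (illustrative only; the real module derives these flags at import time from sqlalchemy.__version__):

    import re

    def _safe_int(chunk):
        # numeric chunks become ints; pre-release chunks such as "b1" stay strings
        try:
            return int(chunk)
        except ValueError:
            return chunk

    def version_tuple(version_string):
        # same regex as sqla_compat: runs of digits, or a/b/c pre-release markers
        return tuple(_safe_int(x) for x in re.findall(r"(\d+|[abc]\d)", version_string))

    vers = version_tuple("1.2.16")      # -> (1, 2, 16)
    sqla_120 = vers >= (1, 2, 0)        # True: comment reflection helpers are enabled
    sqla_110 = vers >= (1, 1, 0)        # True: AUTOINCREMENT_DEFAULT becomes "auto"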
diff --git a/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/METADATA b/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/METADATA new file mode 100644 index 0000000..b43611e --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/METADATA @@ -0,0 +1,153 @@ +Metadata-Version: 2.1 +Name: arrow +Version: 0.14.5 +Summary: Better dates & times for Python +Home-page: https://arrow.readthedocs.io/en/latest/ +Author: Chris Smith +Author-email: crsmithdev@gmail.com +License: Apache 2.0 +Project-URL: Repository, https://github.com/crsmithdev/arrow +Project-URL: Bug Reports, https://github.com/crsmithdev/arrow/issues +Project-URL: Documentation, https://arrow.readthedocs.io +Keywords: arrow date time datetime timestamp timezone +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* +Description-Content-Type: text/x-rst +Requires-Dist: python-dateutil +Requires-Dist: backports.functools-lru-cache (>=1.2.1) ; python_version == "2.7" + +Arrow: Better dates & times for Python +====================================== + +.. start-inclusion-marker-do-not-remove + +.. image:: https://travis-ci.org/crsmithdev/arrow.svg?branch=master + :alt: Build Status + :target: https://travis-ci.org/crsmithdev/arrow + +.. image:: https://codecov.io/github/crsmithdev/arrow/coverage.svg?branch=master + :alt: Codecov + :target: https://codecov.io/github/crsmithdev/arrow + +.. image:: https://img.shields.io/pypi/v/arrow.svg + :alt: PyPI Version + :target: https://pypi.python.org/pypi/arrow + +.. image:: https://img.shields.io/pypi/pyversions/arrow.svg + :alt: Supported Python Versions + :target: https://pypi.python.org/pypi/arrow + +.. image:: https://img.shields.io/pypi/l/arrow.svg + :alt: License + :target: https://pypi.python.org/pypi/arrow + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :alt: Code Style: Black + :target: https://github.com/python/black + +What? +----- + +Arrow is a Python library that offers a sensible, human-friendly approach to creating, manipulating, formatting and converting dates, times, and timestamps. It implements and updates the datetime type, plugging gaps in functionality, and provides an intelligent module API that supports many common creation scenarios. Simply put, it helps you work with dates and times with fewer imports and a lot less code. + +Arrow is heavily inspired by `moment.js `_ and `requests `_. + +Why? +---- + +Python's standard library and some other low-level modules have near-complete date, time and timezone functionality but don't work very well from a usability perspective: + +- Too many modules: datetime, time, calendar, dateutil, pytz and more +- Too many types: date, time, datetime, tzinfo, timedelta, relativedelta, etc. 
+- Timezones and timestamp conversions are verbose and unpleasant +- Timezone naivety is the norm +- Gaps in functionality: ISO-8601 parsing, timespans, humanization + +Features +-------- + +- Fully implemented, drop-in replacement for datetime +- Supports Python 2.7, 3.5, 3.6, 3.7 and 3.8 +- Timezone-aware & UTC by default +- Provides super-simple creation options for many common input scenarios +- Updated :code:`replace` method with support for relative offsets, including weeks +- Formats and parses strings automatically +- Partial support for ISO-8601 +- Timezone conversion +- Timestamp available as a property +- Generates time spans, ranges, floors and ceilings in timeframes from year to microsecond +- Humanizes and supports a growing list of contributed locales +- Extensible for your own Arrow-derived types + +Quick Start +----------- + +Installation +~~~~~~~~~~~~ + +To install Arrow, use `pip `_ or `pipenv `_: + +.. code-block:: console + + $ pip install -U arrow + +Example Usage +~~~~~~~~~~~~~ + +.. code-block:: python + + >>> import arrow + >>> utc = arrow.utcnow() + >>> utc + + + >>> utc = utc.replace(hours=-1) + >>> utc + + + >>> local = utc.to('US/Pacific') + >>> local + + + >>> arrow.get('2013-05-11T21:23:58.970460+00:00') + + + >>> local.timestamp + 1368303838 + + >>> local.format() + '2013-05-11 13:23:58 -07:00' + + >>> local.format('YYYY-MM-DD HH:mm:ss ZZ') + '2013-05-11 13:23:58 -07:00' + + >>> local.humanize() + 'an hour ago' + + >>> local.humanize(locale='ko_kr') + '1시간 전' + +.. end-inclusion-marker-do-not-remove + +Documentation +------------- + +For full documentation, please visit `arrow.readthedocs.io `_. + +Contributing +------------ + +Contributions are welcome for both code and localization. To get started, find an issue or feature to tackle on `the issue tracker `_ and then fork `the repository `_ on GitHub to begin making changes. If you would like to help with localization, please see `locales.py `_ for what locales are currently supported. If you are helping with code, make sure to add tests to ensure that a bug was fixed or the feature works as intended. 
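The quick-start section above walks through creation, relative shifts, timezone conversion, parsing, formatting, and humanizing. A compact, runnable recap against the arrow 0.14.x API vendored in this tree (note that in this version timestamp is a property, and relative offsets go through shift(); the replace(hours=-1) form shown in the metadata only accepts absolute fields in arrow/arrow.py further down):

    import arrow

    utc = arrow.utcnow()                    # timezone-aware, UTC by default
    hour_ago = utc.shift(hours=-1)          # relative shift (replace() here takes absolute fields only)
    local = hour_ago.to('US/Pacific')       # timezone conversion

    parsed = arrow.get('2013-05-11T21:23:58.970460+00:00')   # ISO-8601 parsing

    print(local.timestamp)                          # int property in 0.14.x
    print(local.format('YYYY-MM-DD HH:mm:ss ZZ'))   # e.g. '2013-05-11 13:23:58 -07:00'
    print(local.humanize())                         # e.g. 'an hour ago'
    print(local.humanize(locale='ko_kr'))           # localized output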
+ + diff --git a/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/RECORD b/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/RECORD new file mode 100644 index 0000000..0a5bbdb --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/RECORD @@ -0,0 +1,24 @@ +arrow/__init__.py,sha256=EDUi1LhVMu-m8T0aXYjKNejJ27OHRPuZIS-BPnNrATw,151 +arrow/_version.py,sha256=NAUBX_trzQBx5ZMxPGVgGrbbz9R2O-FqcIBv7OYAuIg,23 +arrow/api.py,sha256=If7DvsjCnVhn9UWqMs49L3v5WhD6Kk279SLjetaym6c,1182 +arrow/arrow.py,sha256=e051JcbR64Tgh9G4xJEzLNG1LC88jFo4jf8zRWy9wsI,42619 +arrow/factory.py,sha256=oDu8q35eE324UnF_xP7EQlazhywMOwWBNSa_mWFznO8,10106 +arrow/formatter.py,sha256=IfGxEDjSxREsPTkH7J9Z16cvyuxOIF-lV2AlPCF9gcM,3495 +arrow/locales.py,sha256=pUUQwSOb-RPsbOKe0bAgryhaeEnKUlOy415Q8lI2tL0,88777 +arrow/parser.py,sha256=3ex2sro7_LYupr_p2GGA_Za0eexd05IXW4B4Bc66H64,11463 +arrow/util.py,sha256=inZSvSbI4iXWC5uWW716bsXnZGQUCFxy5Dz6AXSL43o,2581 +arrow-0.14.5.dist-info/LICENSE,sha256=pLdgG-UFacLJapgY_ICbAUlBDITJlxTWDJ1PsK6GH6I,579 +arrow-0.14.5.dist-info/METADATA,sha256=gQ9qYTk9ReMSKJ-vi0_-eARIevCRAggiDQZnofRcavA,5660 +arrow-0.14.5.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 +arrow-0.14.5.dist-info/top_level.txt,sha256=aCBThK2RIB824ctI3l9i6z94l8UYpFF-BC4m3dDzFFo,6 +arrow-0.14.5.dist-info/RECORD,, +arrow-0.14.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +arrow/__pycache__/factory.cpython-36.pyc,, +arrow/__pycache__/api.cpython-36.pyc,, +arrow/__pycache__/util.cpython-36.pyc,, +arrow/__pycache__/parser.cpython-36.pyc,, +arrow/__pycache__/arrow.cpython-36.pyc,, +arrow/__pycache__/locales.cpython-36.pyc,, +arrow/__pycache__/formatter.cpython-36.pyc,, +arrow/__pycache__/_version.cpython-36.pyc,, +arrow/__pycache__/__init__.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/WHEEL b/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/WHEEL new file mode 100644 index 0000000..78e6f69 --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.4) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/top_level.txt new file mode 100644 index 0000000..e2dc747 --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow-0.14.5.dist-info/top_level.txt @@ -0,0 +1 @@ +arrow diff --git a/py3/lib/python3.6/site-packages/arrow/__init__.py b/py3/lib/python3.6/site-packages/arrow/__init__.py new file mode 100644 index 0000000..9b5fa68 --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- +from ._version import __version__ +from .api import get, now, utcnow +from .arrow import Arrow +from .factory import ArrowFactory diff --git a/py3/lib/python3.6/site-packages/arrow/_version.py b/py3/lib/python3.6/site-packages/arrow/_version.py new file mode 100644 index 0000000..141826d --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow/_version.py @@ -0,0 +1 @@ +__version__ = "0.14.5" diff --git a/py3/lib/python3.6/site-packages/arrow/api.py b/py3/lib/python3.6/site-packages/arrow/api.py new file mode 100644 index 0000000..c13fd71 --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow/api.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +""" +Provides the default implementation of :class:`ArrowFactory ` +methods for use as a module API. 
+ +""" + +from __future__ import absolute_import + +from arrow.factory import ArrowFactory + +# internal default factory. +_factory = ArrowFactory() + + +def get(*args, **kwargs): + """ Calls the default :class:`ArrowFactory ` ``get`` method. + + """ + + return _factory.get(*args, **kwargs) + + +get.__doc__ = _factory.get.__doc__ + + +def utcnow(): + """ Calls the default :class:`ArrowFactory ` ``utcnow`` method. + + """ + + return _factory.utcnow() + + +utcnow.__doc__ = _factory.utcnow.__doc__ + + +def now(tz=None): + """ Calls the default :class:`ArrowFactory ` ``now`` method. + + """ + + return _factory.now(tz) + + +now.__doc__ = _factory.now.__doc__ + + +def factory(type): + """ Returns an :class:`.ArrowFactory` for the specified :class:`Arrow ` + or derived type. + + :param type: the type, :class:`Arrow ` or derived. + + """ + + return ArrowFactory(type) + + +__all__ = ["get", "utcnow", "now", "factory"] diff --git a/py3/lib/python3.6/site-packages/arrow/arrow.py b/py3/lib/python3.6/site-packages/arrow/arrow.py new file mode 100644 index 0000000..08b9b57 --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow/arrow.py @@ -0,0 +1,1357 @@ +# -*- coding: utf-8 -*- +""" +Provides the :class:`Arrow ` class, an enhanced ``datetime`` +replacement. + +""" + +from __future__ import absolute_import + +import calendar +import sys +from datetime import datetime, timedelta, tzinfo +from math import trunc + +from dateutil import tz as dateutil_tz +from dateutil.relativedelta import relativedelta + +from arrow import formatter, locales, parser, util + + +class Arrow(object): + """An :class:`Arrow ` object. + + Implements the ``datetime`` interface, behaving as an aware ``datetime`` while implementing + additional functionality. + + :param year: the calendar year. + :param month: the calendar month. + :param day: the calendar day. + :param hour: (optional) the hour. Defaults to 0. + :param minute: (optional) the minute, Defaults to 0. + :param second: (optional) the second, Defaults to 0. + :param microsecond: (optional) the microsecond. Defaults 0. + :param tzinfo: (optional) A timezone expression. Defaults to UTC. + + .. _tz-expr: + + Recognized timezone expressions: + + - A ``tzinfo`` object. + - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'. + - A ``str`` in ISO-8601 style, as in '+07:00'. + - A ``str``, one of the following: 'local', 'utc', 'UTC'. + + Usage:: + + >>> import arrow + >>> arrow.Arrow(2013, 5, 5, 12, 30, 45) + + + """ + + resolution = datetime.resolution + + _ATTRS = ["year", "month", "day", "hour", "minute", "second", "microsecond"] + _ATTRS_PLURAL = ["{}s".format(a) for a in _ATTRS] + _MONTHS_PER_QUARTER = 3 + + def __init__( + self, year, month, day, hour=0, minute=0, second=0, microsecond=0, tzinfo=None + ): + + if util.isstr(tzinfo): + tzinfo = parser.TzinfoParser.parse(tzinfo) + tzinfo = tzinfo if tzinfo is not None else dateutil_tz.tzutc() + + self._datetime = datetime( + year, month, day, hour, minute, second, microsecond, tzinfo + ) + + # factories: single object, both original and from datetime. + + @classmethod + def now(cls, tzinfo=None): + """Constructs an :class:`Arrow ` object, representing "now" in the given + timezone. + + :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time. 
+ + Usage:: + + >>> arrow.now('Asia/Baku') + + + """ + + tzinfo = tzinfo if tzinfo is not None else dateutil_tz.tzlocal() + dt = datetime.now(tzinfo) + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + ) + + @classmethod + def utcnow(cls): + """ Constructs an :class:`Arrow ` object, representing "now" in UTC + time. + + Usage:: + + >>> arrow.utcnow() + + + """ + + dt = datetime.now(dateutil_tz.tzutc()) + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + ) + + @classmethod + def fromtimestamp(cls, timestamp, tzinfo=None): + """ Constructs an :class:`Arrow ` object from a timestamp, converted to + the given timezone. + + :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either. + :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time. + + Timestamps should always be UTC. If you have a non-UTC timestamp:: + + >>> arrow.Arrow.utcfromtimestamp(1367900664).replace(tzinfo='US/Pacific') + + + """ + + tzinfo = tzinfo if tzinfo is not None else dateutil_tz.tzlocal() + timestamp = cls._get_timestamp_from_input(timestamp) + dt = datetime.fromtimestamp(timestamp, tzinfo) + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + ) + + @classmethod + def utcfromtimestamp(cls, timestamp): + """Constructs an :class:`Arrow ` object from a timestamp, in UTC time. + + :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either. + + """ + + timestamp = cls._get_timestamp_from_input(timestamp) + dt = datetime.utcfromtimestamp(timestamp) + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dateutil_tz.tzutc(), + ) + + @classmethod + def fromdatetime(cls, dt, tzinfo=None): + """ Constructs an :class:`Arrow ` object from a ``datetime`` and + optional replacement timezone. + + :param dt: the ``datetime`` + :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to ``dt``'s + timezone, or UTC if naive. + + If you only want to replace the timezone of naive datetimes:: + + >>> dt + datetime.datetime(2013, 5, 5, 0, 0, tzinfo=tzutc()) + >>> arrow.Arrow.fromdatetime(dt, dt.tzinfo or 'US/Pacific') + + + """ + + if tzinfo is None: + if dt.tzinfo is None: + tzinfo = dateutil_tz.tzutc() + else: + tzinfo = dt.tzinfo + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + tzinfo, + ) + + @classmethod + def fromdate(cls, date, tzinfo=None): + """ Constructs an :class:`Arrow ` object from a ``date`` and optional + replacement timezone. Time values are set to 0. + + :param date: the ``date`` + :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to UTC. + """ + + tzinfo = tzinfo if tzinfo is not None else dateutil_tz.tzutc() + + return cls(date.year, date.month, date.day, tzinfo=tzinfo) + + @classmethod + def strptime(cls, date_str, fmt, tzinfo=None): + """ Constructs an :class:`Arrow ` object from a date string and format, + in the style of ``datetime.strptime``. Optionally replaces the parsed timezone. + + :param date_str: the date string. + :param fmt: the format string. + :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to the parsed + timezone if ``fmt`` contains a timezone directive, otherwise UTC. 
+ + Usage:: + + >>> arrow.Arrow.strptime('20-01-2019 15:49:10', '%d-%m-%Y %H:%M:%S') + + + """ + + dt = datetime.strptime(date_str, fmt) + tzinfo = tzinfo if tzinfo is not None else dt.tzinfo + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + tzinfo, + ) + + # factories: ranges and spans + + @classmethod + def range(cls, frame, start, end=None, tz=None, limit=None): + """ Returns an iterator of :class:`Arrow ` objects, representing + points in time between two inputs. + + :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). + :param start: A datetime expression, the start of the range. + :param end: (optional) A datetime expression, the end of the range. + :param tz: (optional) A :ref:`timezone expression `. Defaults to + ``start``'s timezone, or UTC if ``start`` is naive. + :param limit: (optional) A maximum number of tuples to return. + + **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to + return the entire range. Call with ``limit`` alone to return a maximum # of results from + the start. Call with both to cap a range at a maximum # of results. + + **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before + iterating. As such, either call with naive objects and ``tz``, or aware objects from the + same timezone and no ``tz``. + + Supported frame values: year, quarter, month, week, day, hour, minute, second. + + Recognized datetime expressions: + + - An :class:`Arrow ` object. + - A ``datetime`` object. + + Usage:: + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.range('hour', start, end): + ... print(repr(r)) + ... + + + + + + + **NOTE**: Unlike Python's ``range``, ``end`` *may* be included in the returned iterator:: + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 13, 30) + >>> for r in arrow.Arrow.range('hour', start, end): + ... print(repr(r)) + ... + + + + """ + + _, frame_relative, relative_steps = cls._get_frames(frame) + + tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz) + + start = cls._get_datetime(start).replace(tzinfo=tzinfo) + end, limit = cls._get_iteration_params(end, limit) + end = cls._get_datetime(end).replace(tzinfo=tzinfo) + + current = cls.fromdatetime(start) + i = 0 + + while current <= end and i < limit: + i += 1 + yield current + + values = [getattr(current, f) for f in cls._ATTRS] + current = cls(*values, tzinfo=tzinfo) + relativedelta( + **{frame_relative: relative_steps} + ) + + @classmethod + def span_range(cls, frame, start, end, tz=None, limit=None): + """ Returns an iterator of tuples, each :class:`Arrow ` objects, + representing a series of timespans between two inputs. + + :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). + :param start: A datetime expression, the start of the range. + :param end: (optional) A datetime expression, the end of the range. + :param tz: (optional) A :ref:`timezone expression `. Defaults to + ``start``'s timezone, or UTC if ``start`` is naive. + :param limit: (optional) A maximum number of tuples to return. + + **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to + return the entire range. Call with ``limit`` alone to return a maximum # of results from + the start. Call with both to cap a range at a maximum # of results. 
+ + **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before + iterating. As such, either call with naive objects and ``tz``, or aware objects from the + same timezone and no ``tz``. + + Supported frame values: year, quarter, month, week, day, hour, minute, second. + + Recognized datetime expressions: + + - An :class:`Arrow ` object. + - A ``datetime`` object. + + **NOTE**: Unlike Python's ``range``, ``end`` will *always* be included in the returned + iterator of timespans. + + Usage: + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.span_range('hour', start, end): + ... print(r) + ... + (, ) + (, ) + (, ) + (, ) + (, ) + (, ) + + """ + + tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz) + start = cls.fromdatetime(start, tzinfo).span(frame)[0] + _range = cls.range(frame, start, end, tz, limit) + return (r.span(frame) for r in _range) + + @classmethod + def interval(cls, frame, start, end, interval=1, tz=None): + """ Returns an iterator of tuples, each :class:`Arrow ` objects, + representing a series of intervals between two inputs. + + :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). + :param start: A datetime expression, the start of the range. + :param end: (optional) A datetime expression, the end of the range. + :param interval: (optional) Time interval for the given time frame. + :param tz: (optional) A timezone expression. Defaults to UTC. + + Supported frame values: year, quarter, month, week, day, hour, minute, second + + Recognized datetime expressions: + + - An :class:`Arrow ` object. + - A ``datetime`` object. + + Recognized timezone expressions: + + - A ``tzinfo`` object. + - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'. + - A ``str`` in ISO-8601 style, as in '+07:00'. + - A ``str``, one of the following: 'local', 'utc', 'UTC'. + + Usage: + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.interval('hour', start, end, 2): + ... print r + ... + (, ) + (, ) + (, ) + """ + if interval < 1: + raise ValueError("interval has to be a positive integer") + + spanRange = iter(cls.span_range(frame, start, end, tz)) + while True: + try: + intvlStart, intvlEnd = next(spanRange) + for _ in range(interval - 1): + _, intvlEnd = next(spanRange) + yield intvlStart, intvlEnd + except StopIteration: + return + + # representations + + def __repr__(self): + return "<{} [{}]>".format(self.__class__.__name__, self.__str__()) + + def __str__(self): + return self._datetime.isoformat() + + def __format__(self, formatstr): + + if len(formatstr) > 0: + return self.format(formatstr) + + return str(self) + + def __hash__(self): + return self._datetime.__hash__() + + # attributes & properties + + def __getattr__(self, name): + + if name == "week": + return self.isocalendar()[1] + + if name == "quarter": + return int((self.month - 1) / self._MONTHS_PER_QUARTER) + 1 + + if not name.startswith("_"): + value = getattr(self._datetime, name, None) + + if value is not None: + return value + + return object.__getattribute__(self, name) + + @property + def tzinfo(self): + """ Gets the ``tzinfo`` of the :class:`Arrow ` object. + + Usage:: + + >>> arw=arrow.utcnow() + >>> arw.tzinfo + tzutc() + + """ + + return self._datetime.tzinfo + + @tzinfo.setter + def tzinfo(self, tzinfo): + """ Sets the ``tzinfo`` of the :class:`Arrow ` object. 
""" + + self._datetime = self._datetime.replace(tzinfo=tzinfo) + + @property + def datetime(self): + """ Returns a datetime representation of the :class:`Arrow ` object. + + Usage:: + + >>> arw=arrow.utcnow() + >>> arw.datetime + datetime.datetime(2019, 1, 24, 16, 35, 27, 276649, tzinfo=tzutc()) + + """ + + return self._datetime + + @property + def naive(self): + """ Returns a naive datetime representation of the :class:`Arrow ` + object. + + Usage:: + + >>> nairobi = arrow.now('Africa/Nairobi') + >>> nairobi + + >>> nairobi.naive + datetime.datetime(2019, 1, 23, 19, 27, 12, 297999) + + """ + + return self._datetime.replace(tzinfo=None) + + @property + def timestamp(self): + """ Returns a timestamp representation of the :class:`Arrow ` object, in + UTC time. + + Usage:: + + >>> arrow.utcnow().timestamp + 1548260567 + + """ + + return calendar.timegm(self._datetime.utctimetuple()) + + @property + def float_timestamp(self): + """ Returns a floating-point representation of the :class:`Arrow ` + object, in UTC time. + + Usage:: + + >>> arrow.utcnow().float_timestamp + 1548260516.830896 + + """ + + return self.timestamp + float(self.microsecond) / 1000000 + + # mutation and duplication. + + def clone(self): + """ Returns a new :class:`Arrow ` object, cloned from the current one. + + Usage: + + >>> arw = arrow.utcnow() + >>> cloned = arw.clone() + + """ + + return self.fromdatetime(self._datetime) + + def replace(self, **kwargs): + """ Returns a new :class:`Arrow ` object with attributes updated + according to inputs. + + Use property names to set their value absolutely:: + + >>> import arrow + >>> arw = arrow.utcnow() + >>> arw + + >>> arw.replace(year=2014, month=6) + + + You can also replace the timezone without conversion, using a + :ref:`timezone expression `:: + + >>> arw.replace(tzinfo=tz.tzlocal()) + + + """ + + absolute_kwargs = {} + + for key, value in kwargs.items(): + + if key in self._ATTRS: + absolute_kwargs[key] = value + elif key in ["week", "quarter"]: + raise AttributeError("setting absolute {} is not supported".format(key)) + elif key != "tzinfo": + raise AttributeError('unknown attribute: "{}"'.format(key)) + + current = self._datetime.replace(**absolute_kwargs) + + tzinfo = kwargs.get("tzinfo") + + if tzinfo is not None: + tzinfo = self._get_tzinfo(tzinfo) + current = current.replace(tzinfo=tzinfo) + + return self.fromdatetime(current) + + def shift(self, **kwargs): + """ Returns a new :class:`Arrow ` object with attributes updated + according to inputs. + + Use pluralized property names to shift their current value relatively: + + >>> import arrow + >>> arw = arrow.utcnow() + >>> arw + + >>> arw.shift(years=1, months=-1) + + + Day-of-the-week relative shifting can use either Python's weekday numbers + (Monday = 0, Tuesday = 1 .. Sunday = 6) or using dateutil.relativedelta's + day instances (MO, TU .. SU). When using weekday numbers, the returned + date will always be greater than or equal to the starting date. + + Using the above code (which is a Saturday) and asking it to shift to Saturday: + + >>> arw.shift(weekday=5) + + + While asking for a Monday: + + >>> arw.shift(weekday=0) + + + """ + + relative_kwargs = {} + + for key, value in kwargs.items(): + + if key in self._ATTRS_PLURAL or key in ["weeks", "quarters", "weekday"]: + relative_kwargs[key] = value + else: + raise AttributeError() + + # core datetime does not support quarters, translate to months. 
+ relative_kwargs.setdefault("months", 0) + relative_kwargs["months"] += ( + relative_kwargs.pop("quarters", 0) * self._MONTHS_PER_QUARTER + ) + + current = self._datetime + relativedelta(**relative_kwargs) + + return self.fromdatetime(current) + + def to(self, tz): + """ Returns a new :class:`Arrow ` object, converted + to the target timezone. + + :param tz: A :ref:`timezone expression `. + + Usage:: + + >>> utc = arrow.utcnow() + >>> utc + + + >>> utc.to('US/Pacific') + + + >>> utc.to(tz.tzlocal()) + + + >>> utc.to('-07:00') + + + >>> utc.to('local') + + + >>> utc.to('local').to('utc') + + + """ + + if not isinstance(tz, tzinfo): + tz = parser.TzinfoParser.parse(tz) + + dt = self._datetime.astimezone(tz) + + return self.__class__( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + ) + + def span(self, frame, count=1): + """ Returns two new :class:`Arrow ` objects, representing the timespan + of the :class:`Arrow ` object in a given timeframe. + + :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). + :param count: (optional) the number of frames to span. + + Supported frame values: year, quarter, month, week, day, hour, minute, second. + + Usage:: + + >>> arrow.utcnow() + + + >>> arrow.utcnow().span('hour') + (, ) + + >>> arrow.utcnow().span('day') + (, ) + + >>> arrow.utcnow().span('day', count=2) + (, ) + + """ + + frame_absolute, frame_relative, relative_steps = self._get_frames(frame) + + if frame_absolute == "week": + attr = "day" + elif frame_absolute == "quarter": + attr = "month" + else: + attr = frame_absolute + + index = self._ATTRS.index(attr) + frames = self._ATTRS[: index + 1] + + values = [getattr(self, f) for f in frames] + + for _ in range(3 - len(values)): + values.append(1) + + floor = self.__class__(*values, tzinfo=self.tzinfo) + + if frame_absolute == "week": + floor = floor + relativedelta(days=-(self.isoweekday() - 1)) + elif frame_absolute == "quarter": + floor = floor + relativedelta(months=-((self.month - 1) % 3)) + + ceil = ( + floor + + relativedelta(**{frame_relative: count * relative_steps}) + + relativedelta(microseconds=-1) + ) + + return floor, ceil + + def floor(self, frame): + """ Returns a new :class:`Arrow ` object, representing the "floor" + of the timespan of the :class:`Arrow ` object in a given timeframe. + Equivalent to the first element in the 2-tuple returned by + :func:`span `. + + :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). + + Usage:: + + >>> arrow.utcnow().floor('hour') + + """ + + return self.span(frame)[0] + + def ceil(self, frame): + """ Returns a new :class:`Arrow ` object, representing the "ceiling" + of the timespan of the :class:`Arrow ` object in a given timeframe. + Equivalent to the second element in the 2-tuple returned by + :func:`span `. + + :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). + + Usage:: + + >>> arrow.utcnow().ceil('hour') + + """ + + return self.span(frame)[1] + + # string output and formatting. + + def format(self, fmt="YYYY-MM-DD HH:mm:ssZZ", locale="en_us"): + """ Returns a string representation of the :class:`Arrow ` object, + formatted according to a format string. + + :param fmt: the format string. 
+ + Usage:: + + >>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ') + '2013-05-09 03:56:47 -00:00' + + >>> arrow.utcnow().format('X') + '1368071882' + + >>> arrow.utcnow().format('MMMM DD, YYYY') + 'May 09, 2013' + + >>> arrow.utcnow().format() + '2013-05-09 03:56:47 -00:00' + + """ + + return formatter.DateTimeFormatter(locale).format(self._datetime, fmt) + + def humanize( + self, other=None, locale="en_us", only_distance=False, granularity="auto" + ): + """ Returns a localized, humanized representation of a relative difference in time. + + :param other: (optional) an :class:`Arrow ` or ``datetime`` object. + Defaults to now in the current :class:`Arrow ` object's timezone. + :param locale: (optional) a ``str`` specifying a locale. Defaults to 'en_us'. + :param only_distance: (optional) returns only time difference eg: "11 seconds" without "in" or "ago" part. + :param granularity: (optional) defines the precision of the output. Set it to strings 'second', 'minute', 'hour', 'day', 'month' or 'year'. + + Usage:: + + >>> earlier = arrow.utcnow().shift(hours=-2) + >>> earlier.humanize() + '2 hours ago' + + >>> later = earlier.shift(hours=4) + >>> later.humanize(earlier) + 'in 4 hours' + + """ + + locale = locales.get_locale(locale) + + if other is None: + utc = datetime.utcnow().replace(tzinfo=dateutil_tz.tzutc()) + dt = utc.astimezone(self._datetime.tzinfo) + + elif isinstance(other, Arrow): + dt = other._datetime + + elif isinstance(other, datetime): + if other.tzinfo is None: + dt = other.replace(tzinfo=self._datetime.tzinfo) + else: + dt = other.astimezone(self._datetime.tzinfo) + + else: + raise TypeError() + + delta = int(round(util.total_seconds(self._datetime - dt))) + sign = -1 if delta < 0 else 1 + diff = abs(delta) + delta = diff + + if granularity == "auto": + if diff < 10: + return locale.describe("now", only_distance=only_distance) + + if diff < 45: + seconds = sign * delta + return locale.describe("seconds", seconds, only_distance=only_distance) + + elif diff < 90: + return locale.describe("minute", sign, only_distance=only_distance) + elif diff < 2700: + minutes = sign * int(max(delta / 60, 2)) + return locale.describe("minutes", minutes, only_distance=only_distance) + + elif diff < 5400: + return locale.describe("hour", sign, only_distance=only_distance) + elif diff < 79200: + hours = sign * int(max(delta / 3600, 2)) + return locale.describe("hours", hours, only_distance=only_distance) + + elif diff < 129600: + return locale.describe("day", sign, only_distance=only_distance) + elif diff < 2160000: + days = sign * int(max(delta / 86400, 2)) + return locale.describe("days", days, only_distance=only_distance) + + elif diff < 3888000: + return locale.describe("month", sign, only_distance=only_distance) + elif diff < 29808000: + self_months = self._datetime.year * 12 + self._datetime.month + other_months = dt.year * 12 + dt.month + + months = sign * int(max(abs(other_months - self_months), 2)) + + return locale.describe("months", months, only_distance=only_distance) + + elif diff < 47260800: + return locale.describe("year", sign, only_distance=only_distance) + else: + years = sign * int(max(delta / 31536000, 2)) + return locale.describe("years", years, only_distance=only_distance) + + else: + if granularity == "second": + delta = sign * delta + if abs(delta) < 2: + return locale.describe("now", only_distance=only_distance) + elif granularity == "minute": + delta = sign * delta / float(60) + elif granularity == "hour": + delta = sign * delta / float(60 * 60) + elif granularity == 
"day": + delta = sign * delta / float(60 * 60 * 24) + elif granularity == "month": + delta = sign * delta / float(60 * 60 * 24 * 30.5) + elif granularity == "year": + delta = sign * delta / float(60 * 60 * 24 * 365.25) + else: + raise AttributeError( + 'Error. Could not understand your level of granularity. Please select between \ + "second", "minute", "hour", "day", "week", "month" or "year"' + ) + + if trunc(abs(delta)) != 1: + granularity += "s" + return locale.describe(granularity, delta, only_distance=only_distance) + + # query functions + + def is_between(self, start, end, bounds="()"): + """ Returns a boolean denoting whether the specified date and time is between + the start and end dates and times. + + :param start: an :class:`Arrow ` object. + :param end: an :class:`Arrow ` object. + :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies + whether to include or exclude the start and end values in the range. '(' excludes + the start, '[' includes the start, ')' excludes the end, and ']' includes the end. + If the bounds are not specified, the default bound '()' is used. + + Usage:: + + >>> start = arrow.get(datetime(2013, 5, 5, 12, 30, 10)) + >>> end = arrow.get(datetime(2013, 5, 5, 12, 30, 36)) + >>> arrow.get(datetime(2013, 5, 5, 12, 30, 27)).is_between(start, end) + True + + >>> start = arrow.get(datetime(2013, 5, 5)) + >>> end = arrow.get(datetime(2013, 5, 8)) + >>> arrow.get(datetime(2013, 5, 8)).is_between(start, end, '[]') + True + + >>> start = arrow.get(datetime(2013, 5, 5)) + >>> end = arrow.get(datetime(2013, 5, 8)) + >>> arrow.get(datetime(2013, 5, 8)).is_between(start, end, '[)') + False + + """ + + if bounds != "()" and bounds != "(]" and bounds != "[)" and bounds != "[]": + raise AttributeError( + 'Error. Could not understand the specified bounds. 
Please select between \ + "()", "(]", "[)", or "[]"' + ) + + if not isinstance(start, Arrow): + raise TypeError( + "Can't parse start date argument type of '{}'".format(type(start)) + ) + + if not isinstance(end, Arrow): + raise TypeError( + "Can't parse end date argument type of '{}'".format(type(end)) + ) + + include_start = bounds[0] == "[" + include_end = bounds[1] == "]" + + target_timestamp = self.float_timestamp + start_timestamp = start.float_timestamp + end_timestamp = end.float_timestamp + + if include_start and include_end: + return ( + target_timestamp >= start_timestamp + and target_timestamp <= end_timestamp + ) + elif include_start and not include_end: + return ( + target_timestamp >= start_timestamp and target_timestamp < end_timestamp + ) + elif not include_start and include_end: + return ( + target_timestamp > start_timestamp and target_timestamp <= end_timestamp + ) + else: + return ( + target_timestamp > start_timestamp and target_timestamp < end_timestamp + ) + + # math + + def __add__(self, other): + + if isinstance(other, (timedelta, relativedelta)): + return self.fromdatetime(self._datetime + other, self._datetime.tzinfo) + + return NotImplemented + + def __radd__(self, other): + return self.__add__(other) + + def __sub__(self, other): + + if isinstance(other, (timedelta, relativedelta)): + return self.fromdatetime(self._datetime - other, self._datetime.tzinfo) + + elif isinstance(other, datetime): + return self._datetime - other + + elif isinstance(other, Arrow): + return self._datetime - other._datetime + + return NotImplemented + + def __rsub__(self, other): + + if isinstance(other, datetime): + return other - self._datetime + + return NotImplemented + + # comparisons + + def __eq__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return False + + return self._datetime == self._get_datetime(other) + + def __ne__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return True + + return not self.__eq__(other) + + def __gt__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return NotImplemented + + return self._datetime > self._get_datetime(other) + + def __ge__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return NotImplemented + + return self._datetime >= self._get_datetime(other) + + def __lt__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return NotImplemented + + return self._datetime < self._get_datetime(other) + + def __le__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return NotImplemented + + return self._datetime <= self._get_datetime(other) + + def __cmp__(self, other): + if sys.version_info[0] < 3: # pragma: no cover + if not isinstance(other, (Arrow, datetime)): + raise TypeError( + "can't compare '{}' to '{}'".format(type(self), type(other)) + ) + + # datetime methods + + def date(self): + """ Returns a ``date`` object with the same year, month and day. + + Usage:: + + >>> arrow.utcnow().date() + datetime.date(2019, 1, 23) + + """ + + return self._datetime.date() + + def time(self): + """ Returns a ``time`` object with the same hour, minute, second, microsecond. + + Usage:: + + >>> arrow.utcnow().time() + datetime.time(12, 15, 34, 68352) + + """ + + return self._datetime.time() + + def timetz(self): + """ Returns a ``time`` object with the same hour, minute, second, microsecond and + tzinfo. 
+ + Usage:: + + >>> arrow.utcnow().timetz() + datetime.time(12, 5, 18, 298893, tzinfo=tzutc()) + + """ + + return self._datetime.timetz() + + def astimezone(self, tz): + """ Returns a ``datetime`` object, converted to the specified timezone. + + :param tz: a ``tzinfo`` object. + + Usage:: + + >>> pacific=arrow.now('US/Pacific') + >>> nyc=arrow.now('America/New_York').tzinfo + >>> pacific.astimezone(nyc) + datetime.datetime(2019, 1, 20, 10, 24, 22, 328172, tzinfo=tzfile('/usr/share/zoneinfo/America/New_York')) + + """ + + return self._datetime.astimezone(tz) + + def utcoffset(self): + """ Returns a ``timedelta`` object representing the whole number of minutes difference from + UTC time. + + Usage:: + + >>> arrow.now('US/Pacific').utcoffset() + datetime.timedelta(-1, 57600) + + """ + + return self._datetime.utcoffset() + + def dst(self): + """ Returns the daylight savings time adjustment. + + Usage:: + + >>> arrow.utcnow().dst() + datetime.timedelta(0) + + """ + + return self._datetime.dst() + + def timetuple(self): + """ Returns a ``time.struct_time``, in the current timezone. + + Usage:: + + >>> arrow.utcnow().timetuple() + time.struct_time(tm_year=2019, tm_mon=1, tm_mday=20, tm_hour=15, tm_min=17, tm_sec=8, tm_wday=6, tm_yday=20, tm_isdst=0) + + """ + + return self._datetime.timetuple() + + def utctimetuple(self): + """ Returns a ``time.struct_time``, in UTC time. + + Usage:: + + >>> arrow.utcnow().utctimetuple() + time.struct_time(tm_year=2019, tm_mon=1, tm_mday=19, tm_hour=21, tm_min=41, tm_sec=7, tm_wday=5, tm_yday=19, tm_isdst=0) + + """ + + return self._datetime.utctimetuple() + + def toordinal(self): + """ Returns the proleptic Gregorian ordinal of the date. + + Usage:: + + >>> arrow.utcnow().toordinal() + 737078 + + """ + + return self._datetime.toordinal() + + def weekday(self): + """ Returns the day of the week as an integer (0-6). + + Usage:: + + >>> arrow.utcnow().weekday() + 5 + + """ + + return self._datetime.weekday() + + def isoweekday(self): + """ Returns the ISO day of the week as an integer (1-7). + + Usage:: + + >>> arrow.utcnow().isoweekday() + 6 + + """ + + return self._datetime.isoweekday() + + def isocalendar(self): + """ Returns a 3-tuple, (ISO year, ISO week number, ISO weekday). + + Usage:: + + >>> arrow.utcnow().isocalendar() + (2019, 3, 6) + + """ + + return self._datetime.isocalendar() + + def isoformat(self, sep="T"): + """Returns an ISO 8601 formatted representation of the date and time. + + Usage:: + + >>> arrow.utcnow().isoformat() + '2019-01-19T18:30:52.442118+00:00' + + """ + + return self._datetime.isoformat(sep) + + def ctime(self): + """ Returns a ctime formatted representation of the date and time. + + Usage:: + + >>> arrow.utcnow().ctime() + 'Sat Jan 19 18:26:50 2019' + + """ + + return self._datetime.ctime() + + def strftime(self, format): + """ Formats in the style of ``datetime.strftime``. + + :param format: the format string. + + Usage:: + + >>> arrow.utcnow().strftime('%d-%m-%Y %H:%M:%S') + '23-01-2019 12:28:17' + + """ + + return self._datetime.strftime(format) + + def for_json(self): + """Serializes for the ``for_json`` protocol of simplejson. + + Usage:: + + >>> arrow.utcnow().for_json() + '2019-01-19T18:25:36.760079+00:00' + + """ + + return self.isoformat() + + # internal tools. 
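The block of methods ending here mirrors the stdlib datetime API by delegating each call to the wrapped _datetime, which is what lets an Arrow instance stand in for a datetime in most call sites. A short sketch of those pass-through calls, assuming arrow 0.14.x:

    import arrow

    arw = arrow.utcnow()

    arw.date()                          # datetime.date with the same year/month/day
    arw.timetz()                        # datetime.time carrying tzinfo
    arw.astimezone(arrow.now('US/Pacific').tzinfo)   # plain datetime in another zone
    arw.isocalendar()                   # (ISO year, ISO week number, ISO weekday)
    arw.isoformat()                     # ISO 8601 string
    arw.strftime('%d-%m-%Y %H:%M:%S')   # classic strftime formatting
    arw.for_json()                      # same as isoformat(), for simplejson's for_json hook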
+ + @staticmethod + def _get_tzinfo(tz_expr): + + if tz_expr is None: + return dateutil_tz.tzutc() + if isinstance(tz_expr, tzinfo): + return tz_expr + else: + try: + return parser.TzinfoParser.parse(tz_expr) + except parser.ParserError: + raise ValueError("'{}' not recognized as a timezone".format(tz_expr)) + + @classmethod + def _get_datetime(cls, expr): + + if isinstance(expr, Arrow): + return expr.datetime + + if isinstance(expr, datetime): + return expr + + try: + expr = float(expr) + return cls.utcfromtimestamp(expr).datetime + except Exception: + raise ValueError( + "'{}' not recognized as a timestamp or datetime".format(expr) + ) + + @classmethod + def _get_frames(cls, name): + + if name in cls._ATTRS: + return name, "{}s".format(name), 1 + + elif name in ["week", "weeks"]: + return "week", "weeks", 1 + elif name in ["quarter", "quarters"]: + return "quarter", "months", 3 + + supported = ", ".join(cls._ATTRS + ["week", "weeks"] + ["quarter", "quarters"]) + raise AttributeError( + "range/span over frame {} not supported. Supported frames: {}".format( + name, supported + ) + ) + + @classmethod + def _get_iteration_params(cls, end, limit): + + if end is None: + + if limit is None: + raise Exception("one of 'end' or 'limit' is required") + + return cls.max, limit + + else: + if limit is None: + return end, sys.maxsize + return end, limit + + @staticmethod + def _get_timestamp_from_input(timestamp): + + try: + return float(timestamp) + except Exception: + raise ValueError("cannot parse '{}' as a timestamp".format(timestamp)) + + +Arrow.min = Arrow.fromdatetime(datetime.min) +Arrow.max = Arrow.fromdatetime(datetime.max) diff --git a/py3/lib/python3.6/site-packages/arrow/factory.py b/py3/lib/python3.6/site-packages/arrow/factory.py new file mode 100644 index 0000000..8cca6ac --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow/factory.py @@ -0,0 +1,304 @@ +# -*- coding: utf-8 -*- +""" +Implements the :class:`ArrowFactory ` class, +providing factory methods for common :class:`Arrow ` +construction scenarios. + +""" + +from __future__ import absolute_import + +import calendar +import warnings +from datetime import date, datetime, tzinfo +from time import struct_time + +from dateutil import tz as dateutil_tz + +from arrow import parser +from arrow.arrow import Arrow +from arrow.util import is_timestamp, isstr + + +class ArrowParseWarning(DeprecationWarning): + """Raised when arrow.get() is passed a string with no formats and matches incorrectly + on one of the default formats. + + e.g. + arrow.get('blabla2016') -> + arrow.get('13/4/2045') -> + + In version 0.15.0 this warning will become a ParserError. + """ + + +warnings.simplefilter("always", ArrowParseWarning) + + +class ArrowFactory(object): + """ A factory for generating :class:`Arrow ` objects. + + :param type: (optional) the :class:`Arrow `-based class to construct from. + Defaults to :class:`Arrow `. + + """ + + def __init__(self, type=Arrow): + self.type = type + + def get(self, *args, **kwargs): + """ Returns an :class:`Arrow ` object based on flexible inputs. + + :param locale: (optional) a ``str`` specifying a locale for the parser. Defaults to + 'en_us'. + :param tzinfo: (optional) a :ref:`timezone expression ` or tzinfo object. + Replaces the timezone unless using an input form that is explicitly UTC or specifies + the timezone in a positional argument. Defaults to UTC. 
+ + Usage:: + + >>> import arrow + + **No inputs** to get current UTC time:: + + >>> arrow.get() + + + **None** to also get current UTC time:: + + >>> arrow.get(None) + + + **One** :class:`Arrow ` object, to get a copy. + + >>> arw = arrow.utcnow() + >>> arrow.get(arw) + + + **One** ``str``, ``float``, or ``int``, convertible to a floating-point timestamp, to get + that timestamp in UTC:: + + >>> arrow.get(1367992474.293378) + + + >>> arrow.get(1367992474) + + + >>> arrow.get('1367992474.293378') + + + >>> arrow.get('1367992474') + + + **One** ISO-8601-formatted ``str``, to parse it:: + + >>> arrow.get('2013-09-29T01:26:43.830580') + + + **One** ``tzinfo``, to get the current time **converted** to that timezone:: + + >>> arrow.get(tz.tzlocal()) + + + **One** naive ``datetime``, to get that datetime in UTC:: + + >>> arrow.get(datetime(2013, 5, 5)) + + + **One** aware ``datetime``, to get that datetime:: + + >>> arrow.get(datetime(2013, 5, 5, tzinfo=tz.tzlocal())) + + + **One** naive ``date``, to get that date in UTC:: + + >>> arrow.get(date(2013, 5, 5)) + + + **Two** arguments, a naive or aware ``datetime``, and a replacement + :ref:`timezone expression `:: + + >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific') + + + **Two** arguments, a naive ``date``, and a replacement + :ref:`timezone expression `:: + + >>> arrow.get(date(2013, 5, 5), 'US/Pacific') + + + **Two** arguments, both ``str``, to parse the first according to the format of the second:: + + >>> arrow.get('2013-05-05 12:30:45 America/Chicago', 'YYYY-MM-DD HH:mm:ss ZZZ') + + + **Two** arguments, first a ``str`` to parse and second a ``list`` of formats to try:: + + >>> arrow.get('2013-05-05 12:30:45', ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss']) + + + **Three or more** arguments, as for the constructor of a ``datetime``:: + + >>> arrow.get(2013, 5, 5, 12, 30, 45) + + + **One** time.struct time:: + + >>> arrow.get(gmtime(0)) + + + """ + + arg_count = len(args) + locale = kwargs.pop("locale", "en_us") + tz = kwargs.get("tzinfo", None) + + # if kwargs given, send to constructor unless only tzinfo provided + if len(kwargs) > 1: + arg_count = 3 + + # tzinfo kwarg is not provided + if len(kwargs) == 1 and tz is None: + arg_count = 3 + + # () -> now, @ utc. + if arg_count == 0: + if isinstance(tz, tzinfo): + return self.type.now(tz) + return self.type.utcnow() + + if arg_count == 1: + arg = args[0] + + # (None) -> now, @ utc. + if arg is None: + return self.type.utcnow() + + # try (int, float, str(int), str(float)) -> utc, from timestamp. + if is_timestamp(arg): + return self.type.utcfromtimestamp(arg) + + # (Arrow) -> from the object's datetime. + if isinstance(arg, Arrow): + return self.type.fromdatetime(arg.datetime) + + # (datetime) -> from datetime. + if isinstance(arg, datetime): + return self.type.fromdatetime(arg) + + # (date) -> from date. + if isinstance(arg, date): + return self.type.fromdate(arg) + + # (tzinfo) -> now, @ tzinfo. + elif isinstance(arg, tzinfo): + return self.type.now(arg) + + # (str) -> parse. + elif isstr(arg): + warnings.warn( + "The .get() parsing method without a format string will parse more strictly in version 0.15.0." 
+ "See https://github.com/crsmithdev/arrow/issues/612 for more details.", + ArrowParseWarning, + ) + dt = parser.DateTimeParser(locale).parse_iso(arg) + return self.type.fromdatetime(dt, tz) + + # (struct_time) -> from struct_time + elif isinstance(arg, struct_time): + return self.type.utcfromtimestamp(calendar.timegm(arg)) + + else: + raise TypeError( + "Can't parse single argument type of '{}'".format(type(arg)) + ) + + elif arg_count == 2: + + arg_1, arg_2 = args[0], args[1] + + if isinstance(arg_1, datetime): + + # (datetime, tzinfo/str) -> fromdatetime replace tzinfo. + if isinstance(arg_2, tzinfo) or isstr(arg_2): + return self.type.fromdatetime(arg_1, arg_2) + else: + raise TypeError( + "Can't parse two arguments of types 'datetime', '{}'".format( + type(arg_2) + ) + ) + + elif isinstance(arg_1, date): + + # (date, tzinfo/str) -> fromdate replace tzinfo. + if isinstance(arg_2, tzinfo) or isstr(arg_2): + return self.type.fromdate(arg_1, tzinfo=arg_2) + else: + raise TypeError( + "Can't parse two arguments of types 'date', '{}'".format( + type(arg_2) + ) + ) + + # (str, format) -> parse. + elif isstr(arg_1) and (isstr(arg_2) or isinstance(arg_2, list)): + warnings.warn( + "The .get() parsing method with a format string will parse more strictly in version 0.15.0." + "See https://github.com/crsmithdev/arrow/issues/612 for more details.", + ArrowParseWarning, + ) + dt = parser.DateTimeParser(locale).parse(args[0], args[1]) + return self.type.fromdatetime(dt, tzinfo=tz) + + else: + raise TypeError( + "Can't parse two arguments of types '{}', '{}'".format( + type(arg_1), type(arg_2) + ) + ) + + # 3+ args -> datetime-like via constructor. + else: + return self.type(*args, **kwargs) + + def utcnow(self): + """Returns an :class:`Arrow ` object, representing "now" in UTC time. + + Usage:: + + >>> import arrow + >>> arrow.utcnow() + + """ + + return self.type.utcnow() + + def now(self, tz=None): + """Returns an :class:`Arrow ` object, representing "now" in the given + timezone. + + :param tz: (optional) A :ref:`timezone expression `. Defaults to local time. 
+ + Usage:: + + >>> import arrow + >>> arrow.now() + + + >>> arrow.now('US/Pacific') + + + >>> arrow.now('+02:00') + + + >>> arrow.now('local') + + """ + + if tz is None: + tz = dateutil_tz.tzlocal() + elif not isinstance(tz, tzinfo): + tz = parser.TzinfoParser.parse(tz) + + return self.type.now(tz) diff --git a/py3/lib/python3.6/site-packages/arrow/formatter.py b/py3/lib/python3.6/site-packages/arrow/formatter.py new file mode 100644 index 0000000..08c89e0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow/formatter.py @@ -0,0 +1,111 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +import calendar +import re + +from dateutil import tz as dateutil_tz + +from arrow import locales, util + + +class DateTimeFormatter(object): + + _FORMAT_RE = re.compile( + r"(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?dd?d?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?Z?|a|A|X)" + ) + + def __init__(self, locale="en_us"): + + self.locale = locales.get_locale(locale) + + def format(cls, dt, fmt): + + return cls._FORMAT_RE.sub(lambda m: cls._format_token(dt, m.group(0)), fmt) + + def _format_token(self, dt, token): + + if token == "YYYY": + return self.locale.year_full(dt.year) + if token == "YY": + return self.locale.year_abbreviation(dt.year) + + if token == "MMMM": + return self.locale.month_name(dt.month) + if token == "MMM": + return self.locale.month_abbreviation(dt.month) + if token == "MM": + return "{:02d}".format(dt.month) + if token == "M": + return str(dt.month) + + if token == "DDDD": + return "{:03d}".format(dt.timetuple().tm_yday) + if token == "DDD": + return str(dt.timetuple().tm_yday) + if token == "DD": + return "{:02d}".format(dt.day) + if token == "D": + return str(dt.day) + + if token == "Do": + return self.locale.ordinal_number(dt.day) + + if token == "dddd": + return self.locale.day_name(dt.isoweekday()) + if token == "ddd": + return self.locale.day_abbreviation(dt.isoweekday()) + if token == "d": + return str(dt.isoweekday()) + + if token == "HH": + return "{:02d}".format(dt.hour) + if token == "H": + return str(dt.hour) + if token == "hh": + return "{:02d}".format(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)) + if token == "h": + return str(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)) + + if token == "mm": + return "{:02d}".format(dt.minute) + if token == "m": + return str(dt.minute) + + if token == "ss": + return "{:02d}".format(dt.second) + if token == "s": + return str(dt.second) + + if token == "SSSSSS": + return str("{:06d}".format(int(dt.microsecond))) + if token == "SSSSS": + return str("{:05d}".format(int(dt.microsecond / 10))) + if token == "SSSS": + return str("{:04d}".format(int(dt.microsecond / 100))) + if token == "SSS": + return str("{:03d}".format(int(dt.microsecond / 1000))) + if token == "SS": + return str("{:02d}".format(int(dt.microsecond / 10000))) + if token == "S": + return str(int(dt.microsecond / 100000)) + + if token == "X": + return str(calendar.timegm(dt.utctimetuple())) + + if token == "ZZZ": + return dt.tzname() + + if token in ["ZZ", "Z"]: + separator = ":" if token == "ZZ" else "" + tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo + total_minutes = int(util.total_seconds(tz.utcoffset(dt)) / 60) + + sign = "+" if total_minutes >= 0 else "-" + total_minutes = abs(total_minutes) + hour, minute = divmod(total_minutes, 60) + + return "{}{:02d}{}{:02d}".format(sign, hour, separator, minute) + + if token in ("a", "A"): + return self.locale.meridian(dt.hour, token) diff --git a/py3/lib/python3.6/site-packages/arrow/locales.py 
b/py3/lib/python3.6/site-packages/arrow/locales.py new file mode 100644 index 0000000..20017af --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow/locales.py @@ -0,0 +1,3945 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import inspect +import sys +from math import trunc + + +def get_locale(name): + """Returns an appropriate :class:`Locale ` + corresponding to an inpute locale name. + + :param name: the name of the locale. + + """ + + locale_cls = _locales.get(name.lower()) + + if locale_cls is None: + raise ValueError("Unsupported locale '{}'".format(name)) + + return locale_cls() + + +# base locale type. + + +class Locale(object): + """ Represents locale-specific data and functionality. """ + + names = [] + + timeframes = { + "now": "", + "seconds": "", + "minute": "", + "minutes": "", + "hour": "", + "hours": "", + "day": "", + "days": "", + "month": "", + "months": "", + "year": "", + "years": "", + } + + meridians = {"am": "", "pm": "", "AM": "", "PM": ""} + + past = None + future = None + + month_names = [] + month_abbreviations = [] + + day_names = [] + day_abbreviations = [] + + ordinal_day_re = r"(\d+)" + + def __init__(self): + + self._month_name_to_ordinal = None + + def describe(self, timeframe, delta=0, only_distance=False): + """ Describes a delta within a timeframe in plain language. + + :param timeframe: a string representing a timeframe. + :param delta: a quantity representing a delta in a timeframe. + :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords + """ + + humanized = self._format_timeframe(timeframe, delta) + if not only_distance: + humanized = self._format_relative(humanized, timeframe, delta) + + return humanized + + def day_name(self, day): + """ Returns the day name for a specified day of the week. + + :param day: the ``int`` day of the week (1-7). + + """ + + return self.day_names[day] + + def day_abbreviation(self, day): + """ Returns the day abbreviation for a specified day of the week. + + :param day: the ``int`` day of the week (1-7). + + """ + + return self.day_abbreviations[day] + + def month_name(self, month): + """ Returns the month name for a specified month of the year. + + :param month: the ``int`` month of the year (1-12). + + """ + + return self.month_names[month] + + def month_abbreviation(self, month): + """ Returns the month abbreviation for a specified month of the year. + + :param month: the ``int`` month of the year (1-12). + + """ + + return self.month_abbreviations[month] + + def month_number(self, name): + """ Returns the month number for a month specified by name or abbreviation. + + :param name: the month name or abbreviation. + + """ + + if self._month_name_to_ordinal is None: + self._month_name_to_ordinal = self._name_to_ordinal(self.month_names) + self._month_name_to_ordinal.update( + self._name_to_ordinal(self.month_abbreviations) + ) + + return self._month_name_to_ordinal.get(name) + + def year_full(self, year): + """ Returns the year for specific locale if available + + :param name: the ``int`` year (4-digit) + """ + return "{:04d}".format(year) + + def year_abbreviation(self, year): + """ Returns the year for specific locale if available + + :param name: the ``int`` year (4-digit) + """ + return "{:04d}".format(year)[2:] + + def meridian(self, hour, token): + """ Returns the meridian indicator for a specified hour and format token. + + :param hour: the ``int`` hour of the day. + :param token: the format token. 
+ """ + + if token == "a": + return self.meridians["am"] if hour < 12 else self.meridians["pm"] + if token == "A": + return self.meridians["AM"] if hour < 12 else self.meridians["PM"] + + def ordinal_number(self, n): + """ Returns the ordinal format of a given integer + + :param n: an integer + """ + return self._ordinal_number(n) + + def _ordinal_number(self, n): + return "{}".format(n) + + def _name_to_ordinal(self, lst): + return dict(map(lambda i: (i[1].lower(), i[0] + 1), enumerate(lst[1:]))) + + def _format_timeframe(self, timeframe, delta): + return self.timeframes[timeframe].format(trunc(abs(delta))) + + def _format_relative(self, humanized, timeframe, delta): + + if timeframe == "now": + return humanized + + direction = self.past if delta < 0 else self.future + + return direction.format(humanized) + + +# base locale type implementations. + + +class EnglishLocale(Locale): + + names = [ + "en", + "en_us", + "en_gb", + "en_au", + "en_be", + "en_jp", + "en_za", + "en_ca", + "en_ph", + ] + + past = "{0} ago" + future = "in {0}" + + timeframes = { + "now": "just now", + "seconds": "seconds", + "minute": "a minute", + "minutes": "{0} minutes", + "hour": "an hour", + "hours": "{0} hours", + "day": "a day", + "days": "{0} days", + "month": "a month", + "months": "{0} months", + "year": "a year", + "years": "{0} years", + } + + meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} + + month_names = [ + "", + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December", + ] + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", + ] + + day_names = [ + "", + "Monday", + "Tuesday", + "Wednesday", + "Thursday", + "Friday", + "Saturday", + "Sunday", + ] + day_abbreviations = ["", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] + + ordinal_day_re = r"((?P[2-3]?1(?=st)|[2-3]?2(?=nd)|[2-3]?3(?=rd)|[1-3]?[04-9](?=th)|1[1-3](?=th))(st|nd|rd|th))" + + def _ordinal_number(self, n): + if n % 100 not in (11, 12, 13): + remainder = abs(n) % 10 + if remainder == 1: + return "{}st".format(n) + elif remainder == 2: + return "{}nd".format(n) + elif remainder == 3: + return "{}rd".format(n) + return "{}th".format(n) + + def describe(self, timeframe, delta=0, only_distance=False): + """ Describes a delta within a timeframe in plain language. + + :param timeframe: a string representing a timeframe. + :param delta: a quantity representing a delta in a timeframe. 
+ :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords + """ + + humanized = super(EnglishLocale, self).describe(timeframe, delta, only_distance) + if only_distance and timeframe == "now": + humanized = "instantly" + + return humanized + + +class ItalianLocale(Locale): + names = ["it", "it_it"] + past = "{0} fa" + future = "tra {0}" + + timeframes = { + "now": "adesso", + "seconds": "qualche secondo", + "minute": "un minuto", + "minutes": "{0} minuti", + "hour": "un'ora", + "hours": "{0} ore", + "day": "un giorno", + "days": "{0} giorni", + "month": "un mese", + "months": "{0} mesi", + "year": "un anno", + "years": "{0} anni", + } + + month_names = [ + "", + "gennaio", + "febbraio", + "marzo", + "aprile", + "maggio", + "giugno", + "luglio", + "agosto", + "settembre", + "ottobre", + "novembre", + "dicembre", + ] + month_abbreviations = [ + "", + "gen", + "feb", + "mar", + "apr", + "mag", + "giu", + "lug", + "ago", + "set", + "ott", + "nov", + "dic", + ] + + day_names = [ + "", + "lunedì", + "martedì", + "mercoledì", + "giovedì", + "venerdì", + "sabato", + "domenica", + ] + day_abbreviations = ["", "lun", "mar", "mer", "gio", "ven", "sab", "dom"] + + ordinal_day_re = r"((?P[1-3]?[0-9](?=[ºª]))[ºª])" + + def _ordinal_number(self, n): + return "{}º".format(n) + + +class SpanishLocale(Locale): + names = ["es", "es_es"] + past = "hace {0}" + future = "en {0}" + + timeframes = { + "now": "ahora", + "seconds": "segundos", + "minute": "un minuto", + "minutes": "{0} minutos", + "hour": "una hora", + "hours": "{0} horas", + "day": "un día", + "days": "{0} días", + "month": "un mes", + "months": "{0} meses", + "year": "un año", + "years": "{0} años", + } + + meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} + + month_names = [ + "", + "enero", + "febrero", + "marzo", + "abril", + "mayo", + "junio", + "julio", + "agosto", + "septiembre", + "octubre", + "noviembre", + "diciembre", + ] + month_abbreviations = [ + "", + "ene", + "feb", + "mar", + "abr", + "may", + "jun", + "jul", + "ago", + "sep", + "oct", + "nov", + "dic", + ] + + day_names = [ + "", + "lunes", + "martes", + "miércoles", + "jueves", + "viernes", + "sábado", + "domingo", + ] + day_abbreviations = ["", "lun", "mar", "mie", "jue", "vie", "sab", "dom"] + + ordinal_day_re = r"((?P[1-3]?[0-9](?=[ºª]))[ºª])" + + def _ordinal_number(self, n): + return "{}º".format(n) + + +class FrenchLocale(Locale): + names = ["fr", "fr_fr"] + past = "il y a {0}" + future = "dans {0}" + + timeframes = { + "now": "maintenant", + "seconds": "quelques secondes", + "minute": "une minute", + "minutes": "{0} minutes", + "hour": "une heure", + "hours": "{0} heures", + "day": "un jour", + "days": "{0} jours", + "month": "un mois", + "months": "{0} mois", + "year": "un an", + "years": "{0} ans", + } + + month_names = [ + "", + "janvier", + "février", + "mars", + "avril", + "mai", + "juin", + "juillet", + "août", + "septembre", + "octobre", + "novembre", + "décembre", + ] + month_abbreviations = [ + "", + "janv", + "févr", + "mars", + "avr", + "mai", + "juin", + "juil", + "août", + "sept", + "oct", + "nov", + "déc", + ] + + day_names = [ + "", + "lundi", + "mardi", + "mercredi", + "jeudi", + "vendredi", + "samedi", + "dimanche", + ] + day_abbreviations = ["", "lun", "mar", "mer", "jeu", "ven", "sam", "dim"] + + ordinal_day_re = ( + r"((?P\b1(?=er\b)|[1-3]?[02-9](?=e\b)|[1-3]1(?=e\b))(er|e)\b)" + ) + + def _ordinal_number(self, n): + if abs(n) == 1: + return "{}er".format(n) + return "{}e".format(n) + + +class 
GreekLocale(Locale): + + names = ["el", "el_gr"] + + past = "{0} πριν" + future = "σε {0}" + + timeframes = { + "now": "τώρα", + "seconds": "δευτερόλεπτα", + "minute": "ένα λεπτό", + "minutes": "{0} λεπτά", + "hour": "μία ώρα", + "hours": "{0} ώρες", + "day": "μία μέρα", + "days": "{0} μέρες", + "month": "ένα μήνα", + "months": "{0} μήνες", + "year": "ένα χρόνο", + "years": "{0} χρόνια", + } + + month_names = [ + "", + "Ιανουαρίου", + "Φεβρουαρίου", + "Μαρτίου", + "Απριλίου", + "Μαΐου", + "Ιουνίου", + "Ιουλίου", + "Αυγούστου", + "Σεπτεμβρίου", + "Οκτωβρίου", + "Νοεμβρίου", + "Δεκεμβρίου", + ] + month_abbreviations = [ + "", + "Ιαν", + "Φεβ", + "Μαρ", + "Απρ", + "Μαϊ", + "Ιον", + "Ιολ", + "Αυγ", + "Σεπ", + "Οκτ", + "Νοε", + "Δεκ", + ] + + day_names = [ + "", + "Δευτέρα", + "Τρίτη", + "Τετάρτη", + "Πέμπτη", + "Παρασκευή", + "Σάββατο", + "Κυριακή", + ] + day_abbreviations = ["", "Δευ", "Τρι", "Τετ", "Πεμ", "Παρ", "Σαβ", "Κυρ"] + + +class JapaneseLocale(Locale): + + names = ["ja", "ja_jp"] + + past = "{0}前" + future = "{0}後" + + timeframes = { + "now": "現在", + "seconds": "数秒", + "minute": "1分", + "minutes": "{0}分", + "hour": "1時間", + "hours": "{0}時間", + "day": "1日", + "days": "{0}日", + "month": "1ヶ月", + "months": "{0}ヶ月", + "year": "1年", + "years": "{0}年", + } + + month_names = [ + "", + "1月", + "2月", + "3月", + "4月", + "5月", + "6月", + "7月", + "8月", + "9月", + "10月", + "11月", + "12月", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "月曜日", "火曜日", "水曜日", "木曜日", "金曜日", "土曜日", "日曜日"] + day_abbreviations = ["", "月", "火", "水", "木", "金", "土", "日"] + + +class SwedishLocale(Locale): + + names = ["sv", "sv_se"] + + past = "för {0} sen" + future = "om {0}" + + timeframes = { + "now": "just nu", + "seconds": "några sekunder", + "minute": "en minut", + "minutes": "{0} minuter", + "hour": "en timme", + "hours": "{0} timmar", + "day": "en dag", + "days": "{0} dagar", + "month": "en månad", + "months": "{0} månader", + "year": "ett år", + "years": "{0} år", + } + + month_names = [ + "", + "januari", + "februari", + "mars", + "april", + "maj", + "juni", + "juli", + "augusti", + "september", + "oktober", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "maj", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "måndag", + "tisdag", + "onsdag", + "torsdag", + "fredag", + "lördag", + "söndag", + ] + day_abbreviations = ["", "mån", "tis", "ons", "tor", "fre", "lör", "sön"] + + +class FinnishLocale(Locale): + + names = ["fi", "fi_fi"] + + # The finnish grammar is very complex, and its hard to convert + # 1-to-1 to something like English. 
+ + past = "{0} sitten" + future = "{0} kuluttua" + + timeframes = { + "now": ["juuri nyt", "juuri nyt"], + "seconds": ["muutama sekunti", "muutaman sekunnin"], + "minute": ["minuutti", "minuutin"], + "minutes": ["{0} minuuttia", "{0} minuutin"], + "hour": ["tunti", "tunnin"], + "hours": ["{0} tuntia", "{0} tunnin"], + "day": ["päivä", "päivä"], + "days": ["{0} päivää", "{0} päivän"], + "month": ["kuukausi", "kuukauden"], + "months": ["{0} kuukautta", "{0} kuukauden"], + "year": ["vuosi", "vuoden"], + "years": ["{0} vuotta", "{0} vuoden"], + } + + # Months and days are lowercase in Finnish + month_names = [ + "", + "tammikuu", + "helmikuu", + "maaliskuu", + "huhtikuu", + "toukokuu", + "kesäkuu", + "heinäkuu", + "elokuu", + "syyskuu", + "lokakuu", + "marraskuu", + "joulukuu", + ] + + month_abbreviations = [ + "", + "tammi", + "helmi", + "maalis", + "huhti", + "touko", + "kesä", + "heinä", + "elo", + "syys", + "loka", + "marras", + "joulu", + ] + + day_names = [ + "", + "maanantai", + "tiistai", + "keskiviikko", + "torstai", + "perjantai", + "lauantai", + "sunnuntai", + ] + + day_abbreviations = ["", "ma", "ti", "ke", "to", "pe", "la", "su"] + + def _format_timeframe(self, timeframe, delta): + return ( + self.timeframes[timeframe][0].format(abs(delta)), + self.timeframes[timeframe][1].format(abs(delta)), + ) + + def _format_relative(self, humanized, timeframe, delta): + if timeframe == "now": + return humanized[0] + + direction = self.past if delta < 0 else self.future + which = 0 if delta < 0 else 1 + + return direction.format(humanized[which]) + + def _ordinal_number(self, n): + return "{}.".format(n) + + +class ChineseCNLocale(Locale): + + names = ["zh", "zh_cn"] + + past = "{0}前" + future = "{0}后" + + timeframes = { + "now": "刚才", + "seconds": "几秒", + "minute": "1分钟", + "minutes": "{0}分钟", + "hour": "1小时", + "hours": "{0}小时", + "day": "1天", + "days": "{0}天", + "month": "1个月", + "months": "{0}个月", + "year": "1年", + "years": "{0}年", + } + + month_names = [ + "", + "一月", + "二月", + "三月", + "四月", + "五月", + "六月", + "七月", + "八月", + "九月", + "十月", + "十一月", + "十二月", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日"] + day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] + + +class ChineseTWLocale(Locale): + + names = ["zh_tw"] + + past = "{0}前" + future = "{0}後" + + timeframes = { + "now": "剛才", + "seconds": "幾秒", + "minute": "1分鐘", + "minutes": "{0}分鐘", + "hour": "1小時", + "hours": "{0}小時", + "day": "1天", + "days": "{0}天", + "month": "1個月", + "months": "{0}個月", + "year": "1年", + "years": "{0}年", + } + + month_names = [ + "", + "1月", + "2月", + "3月", + "4月", + "5月", + "6月", + "7月", + "8月", + "9月", + "10月", + "11月", + "12月", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "周一", "周二", "周三", "周四", "周五", "周六", "周日"] + day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] + + +class KoreanLocale(Locale): + + names = ["ko", "ko_kr"] + + past = "{0} 전" + future = "{0} 후" + + timeframes = { + "now": "지금", + "seconds": "몇 초", + "minute": "1분", + "minutes": "{0}분", + "hour": "1시간", + "hours": "{0}시간", + "day": "1일", + "days": "{0}일", + "month": "1개월", + "months": "{0}개월", + "year": "1년", + "years": "{0}년", + } + + month_names = [ + "", + "1월", + "2월", + "3월", + "4월", + "5월", + "6월", + "7월", + "8월", + "9월", + "10월", + "11월", + "12월", + ] 
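To make the two Finnish grammatical forms defined in ``FinnishLocale`` above concrete, a small sketch using ``get_locale`` from this module (values traced by hand through ``describe()``, so treat them as expected rather than verified output):

    from arrow.locales import get_locale

    fi = get_locale('fi')
    # FinnishLocale._format_timeframe returns both forms as a pair;
    # _format_relative then picks index 0 for the past and 1 for the future.
    print(fi.describe('hours', -2))  # expected: '2 tuntia sitten'
    print(fi.describe('hours', 2))   # expected: '2 tunnin kuluttua'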
+ month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "월요일", "화요일", "수요일", "목요일", "금요일", "토요일", "일요일"] + day_abbreviations = ["", "월", "화", "수", "목", "금", "토", "일"] + + +# derived locale types & implementations. +class DutchLocale(Locale): + + names = ["nl", "nl_nl"] + + past = "{0} geleden" + future = "over {0}" + + timeframes = { + "now": "nu", + "seconds": "seconden", + "minute": "een minuut", + "minutes": "{0} minuten", + "hour": "een uur", + "hours": "{0} uur", + "day": "een dag", + "days": "{0} dagen", + "month": "een maand", + "months": "{0} maanden", + "year": "een jaar", + "years": "{0} jaar", + } + + # In Dutch names of months and days are not starting with a capital letter + # like in the English language. + month_names = [ + "", + "januari", + "februari", + "maart", + "april", + "mei", + "juni", + "juli", + "augustus", + "september", + "oktober", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mrt", + "apr", + "mei", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "maandag", + "dinsdag", + "woensdag", + "donderdag", + "vrijdag", + "zaterdag", + "zondag", + ] + day_abbreviations = ["", "ma", "di", "wo", "do", "vr", "za", "zo"] + + +class SlavicBaseLocale(Locale): + def _format_timeframe(self, timeframe, delta): + + form = self.timeframes[timeframe] + delta = abs(delta) + + if isinstance(form, list): + + if delta % 10 == 1 and delta % 100 != 11: + form = form[0] + elif 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): + form = form[1] + else: + form = form[2] + + return form.format(delta) + + +class BelarusianLocale(SlavicBaseLocale): + + names = ["be", "be_by"] + + past = "{0} таму" + future = "праз {0}" + + timeframes = { + "now": "зараз", + "seconds": "некалькі секунд", + "minute": "хвіліну", + "minutes": ["{0} хвіліну", "{0} хвіліны", "{0} хвілін"], + "hour": "гадзіну", + "hours": ["{0} гадзіну", "{0} гадзіны", "{0} гадзін"], + "day": "дзень", + "days": ["{0} дзень", "{0} дні", "{0} дзён"], + "month": "месяц", + "months": ["{0} месяц", "{0} месяцы", "{0} месяцаў"], + "year": "год", + "years": ["{0} год", "{0} гады", "{0} гадоў"], + } + + month_names = [ + "", + "студзеня", + "лютага", + "сакавіка", + "красавіка", + "траўня", + "чэрвеня", + "ліпеня", + "жніўня", + "верасня", + "кастрычніка", + "лістапада", + "снежня", + ] + month_abbreviations = [ + "", + "студ", + "лют", + "сак", + "крас", + "трав", + "чэрв", + "ліп", + "жнів", + "вер", + "каст", + "ліст", + "снеж", + ] + + day_names = [ + "", + "панядзелак", + "аўторак", + "серада", + "чацвер", + "пятніца", + "субота", + "нядзеля", + ] + day_abbreviations = ["", "пн", "ат", "ср", "чц", "пт", "сб", "нд"] + + +class PolishLocale(SlavicBaseLocale): + + names = ["pl", "pl_pl"] + + past = "{0} temu" + future = "za {0}" + + timeframes = { + "now": "teraz", + "seconds": "kilka sekund", + "minute": "minutę", + "minutes": ["{0} minut", "{0} minuty", "{0} minut"], + "hour": "godzina", + "hours": ["{0} godzin", "{0} godziny", "{0} godzin"], + "day": "dzień", + "days": ["{0} dzień", "{0} dni", "{0} dni"], + "month": "miesiąc", + "months": ["{0} miesiąc", "{0} miesiące", "{0} miesięcy"], + "year": "rok", + "years": ["{0} rok", "{0} lata", "{0} lat"], + } + + month_names = [ + "", + "styczeń", + "luty", + "marzec", + "kwiecień", + "maj", + "czerwiec", + "lipiec", + "sierpień", + "wrzesień", + "październik", + "listopad", + "grudzień", + ] + 
month_abbreviations = [ + "", + "sty", + "lut", + "mar", + "kwi", + "maj", + "cze", + "lip", + "sie", + "wrz", + "paź", + "lis", + "gru", + ] + + day_names = [ + "", + "poniedziałek", + "wtorek", + "środa", + "czwartek", + "piątek", + "sobota", + "niedziela", + ] + day_abbreviations = ["", "Pn", "Wt", "Śr", "Czw", "Pt", "So", "Nd"] + + +class RussianLocale(SlavicBaseLocale): + + names = ["ru", "ru_ru"] + + past = "{0} назад" + future = "через {0}" + + timeframes = { + "now": "сейчас", + "seconds": "несколько секунд", + "minute": "минуту", + "minutes": ["{0} минуту", "{0} минуты", "{0} минут"], + "hour": "час", + "hours": ["{0} час", "{0} часа", "{0} часов"], + "day": "день", + "days": ["{0} день", "{0} дня", "{0} дней"], + "month": "месяц", + "months": ["{0} месяц", "{0} месяца", "{0} месяцев"], + "year": "год", + "years": ["{0} год", "{0} года", "{0} лет"], + } + + month_names = [ + "", + "января", + "февраля", + "марта", + "апреля", + "мая", + "июня", + "июля", + "августа", + "сентября", + "октября", + "ноября", + "декабря", + ] + month_abbreviations = [ + "", + "янв", + "фев", + "мар", + "апр", + "май", + "июн", + "июл", + "авг", + "сен", + "окт", + "ноя", + "дек", + ] + + day_names = [ + "", + "понедельник", + "вторник", + "среда", + "четверг", + "пятница", + "суббота", + "воскресенье", + ] + day_abbreviations = ["", "пн", "вт", "ср", "чт", "пт", "сб", "вс"] + + +class AfrikaansLocale(Locale): + + names = ["af", "af_nl"] + + past = "{0} gelede" + future = "in {0}" + + timeframes = { + "now": "nou", + "seconds": "sekondes", + "minute": "minuut", + "minutes": "{0} minute", + "hour": "uur", + "hours": "{0} ure", + "day": "een dag", + "days": "{0} dae", + "month": "een maand", + "months": "{0} maande", + "year": "een jaar", + "years": "{0} jaar", + } + + month_names = [ + "", + "Januarie", + "Februarie", + "Maart", + "April", + "Mei", + "Junie", + "Julie", + "Augustus", + "September", + "Oktober", + "November", + "Desember", + ] + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mrt", + "Apr", + "Mei", + "Jun", + "Jul", + "Aug", + "Sep", + "Okt", + "Nov", + "Des", + ] + + day_names = [ + "", + "Maandag", + "Dinsdag", + "Woensdag", + "Donderdag", + "Vrydag", + "Saterdag", + "Sondag", + ] + day_abbreviations = ["", "Ma", "Di", "Wo", "Do", "Vr", "Za", "So"] + + +class BulgarianLocale(SlavicBaseLocale): + + names = ["bg", "bg_BG"] + + past = "{0} назад" + future = "напред {0}" + + timeframes = { + "now": "сега", + "seconds": "няколко секунди", + "minute": "минута", + "minutes": ["{0} минута", "{0} минути", "{0} минути"], + "hour": "час", + "hours": ["{0} час", "{0} часа", "{0} часа"], + "day": "ден", + "days": ["{0} ден", "{0} дни", "{0} дни"], + "month": "месец", + "months": ["{0} месец", "{0} месеца", "{0} месеца"], + "year": "година", + "years": ["{0} година", "{0} години", "{0} години"], + } + + month_names = [ + "", + "януари", + "февруари", + "март", + "април", + "май", + "юни", + "юли", + "август", + "септември", + "октомври", + "ноември", + "декември", + ] + month_abbreviations = [ + "", + "ян", + "февр", + "март", + "апр", + "май", + "юни", + "юли", + "авг", + "септ", + "окт", + "ноем", + "дек", + ] + + day_names = [ + "", + "понеделник", + "вторник", + "сряда", + "четвъртък", + "петък", + "събота", + "неделя", + ] + day_abbreviations = ["", "пон", "вт", "ср", "четв", "пет", "съб", "нед"] + + +class UkrainianLocale(SlavicBaseLocale): + + names = ["ua", "uk_ua"] + + past = "{0} тому" + future = "за {0}" + + timeframes = { + "now": "зараз", + "seconds": "кілька секунд", + "minute": 
"хвилину", + "minutes": ["{0} хвилину", "{0} хвилини", "{0} хвилин"], + "hour": "годину", + "hours": ["{0} годину", "{0} години", "{0} годин"], + "day": "день", + "days": ["{0} день", "{0} дні", "{0} днів"], + "month": "місяць", + "months": ["{0} місяць", "{0} місяці", "{0} місяців"], + "year": "рік", + "years": ["{0} рік", "{0} роки", "{0} років"], + } + + month_names = [ + "", + "січня", + "лютого", + "березня", + "квітня", + "травня", + "червня", + "липня", + "серпня", + "вересня", + "жовтня", + "листопада", + "грудня", + ] + month_abbreviations = [ + "", + "січ", + "лют", + "бер", + "квіт", + "трав", + "черв", + "лип", + "серп", + "вер", + "жовт", + "лист", + "груд", + ] + + day_names = [ + "", + "понеділок", + "вівторок", + "середа", + "четвер", + "п’ятниця", + "субота", + "неділя", + ] + day_abbreviations = ["", "пн", "вт", "ср", "чт", "пт", "сб", "нд"] + + +class _DeutschLocaleCommonMixin(object): + + past = "vor {0}" + future = "in {0}" + + timeframes = { + "now": "gerade eben", + "seconds": "Sekunden", + "minute": "einer Minute", + "minutes": "{0} Minuten", + "hour": "einer Stunde", + "hours": "{0} Stunden", + "day": "einem Tag", + "days": "{0} Tagen", + "month": "einem Monat", + "months": "{0} Monaten", + "year": "einem Jahr", + "years": "{0} Jahren", + } + + timeframes_only_distance = timeframes.copy() + timeframes_only_distance["minute"] = "eine Minute" + timeframes_only_distance["hour"] = "eine Stunde" + timeframes_only_distance["day"] = "ein Tag" + timeframes_only_distance["month"] = "ein Monat" + timeframes_only_distance["year"] = "ein Jahr" + + month_names = [ + "", + "Januar", + "Februar", + "März", + "April", + "Mai", + "Juni", + "Juli", + "August", + "September", + "Oktober", + "November", + "Dezember", + ] + + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mär", + "Apr", + "Mai", + "Jun", + "Jul", + "Aug", + "Sep", + "Okt", + "Nov", + "Dez", + ] + + day_names = [ + "", + "Montag", + "Dienstag", + "Mittwoch", + "Donnerstag", + "Freitag", + "Samstag", + "Sonntag", + ] + + day_abbreviations = ["", "Mo", "Di", "Mi", "Do", "Fr", "Sa", "So"] + + def _ordinal_number(self, n): + return "{}.".format(n) + + def describe(self, timeframe, delta=0, only_distance=False): + """ Describes a delta within a timeframe in plain language. + + :param timeframe: a string representing a timeframe. + :param delta: a quantity representing a delta in a timeframe. 
+ :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords + """ + + humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta))) + + if not only_distance: + humanized = self._format_timeframe(timeframe, delta) + humanized = self._format_relative(humanized, timeframe, delta) + + return humanized + + +class GermanLocale(_DeutschLocaleCommonMixin, Locale): + + names = ["de", "de_de"] + + +class AustrianLocale(_DeutschLocaleCommonMixin, Locale): + + names = ["de_at"] + + month_names = [ + "", + "Jänner", + "Februar", + "März", + "April", + "Mai", + "Juni", + "Juli", + "August", + "September", + "Oktober", + "November", + "Dezember", + ] + + +class NorwegianLocale(Locale): + + names = ["nb", "nb_no"] + + past = "for {0} siden" + future = "om {0}" + + timeframes = { + "now": "nå nettopp", + "seconds": "noen sekunder", + "minute": "ett minutt", + "minutes": "{0} minutter", + "hour": "en time", + "hours": "{0} timer", + "day": "en dag", + "days": "{0} dager", + "month": "en måned", + "months": "{0} måneder", + "year": "ett år", + "years": "{0} år", + } + + month_names = [ + "", + "januar", + "februar", + "mars", + "april", + "mai", + "juni", + "juli", + "august", + "september", + "oktober", + "november", + "desember", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "mai", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "des", + ] + + day_names = [ + "", + "mandag", + "tirsdag", + "onsdag", + "torsdag", + "fredag", + "lørdag", + "søndag", + ] + day_abbreviations = ["", "ma", "ti", "on", "to", "fr", "lø", "sø"] + + +class NewNorwegianLocale(Locale): + + names = ["nn", "nn_no"] + + past = "for {0} sidan" + future = "om {0}" + + timeframes = { + "now": "no nettopp", + "seconds": "nokre sekund", + "minute": "ett minutt", + "minutes": "{0} minutt", + "hour": "ein time", + "hours": "{0} timar", + "day": "ein dag", + "days": "{0} dagar", + "month": "en månad", + "months": "{0} månader", + "year": "eit år", + "years": "{0} år", + } + + month_names = [ + "", + "januar", + "februar", + "mars", + "april", + "mai", + "juni", + "juli", + "august", + "september", + "oktober", + "november", + "desember", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "mai", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "des", + ] + + day_names = [ + "", + "måndag", + "tysdag", + "onsdag", + "torsdag", + "fredag", + "laurdag", + "sundag", + ] + day_abbreviations = ["", "må", "ty", "on", "to", "fr", "la", "su"] + + +class PortugueseLocale(Locale): + names = ["pt", "pt_pt"] + + past = "há {0}" + future = "em {0}" + + timeframes = { + "now": "agora", + "seconds": "segundos", + "minute": "um minuto", + "minutes": "{0} minutos", + "hour": "uma hora", + "hours": "{0} horas", + "day": "um dia", + "days": "{0} dias", + "month": "um mês", + "months": "{0} meses", + "year": "um ano", + "years": "{0} anos", + } + + month_names = [ + "", + "janeiro", + "fevereiro", + "março", + "abril", + "maio", + "junho", + "julho", + "agosto", + "setembro", + "outubro", + "novembro", + "dezembro", + ] + month_abbreviations = [ + "", + "jan", + "fev", + "mar", + "abr", + "maio", + "jun", + "jul", + "ago", + "set", + "out", + "nov", + "dez", + ] + + day_names = [ + "", + "segunda-feira", + "terça-feira", + "quarta-feira", + "quinta-feira", + "sexta-feira", + "sábado", + "domingo", + ] + day_abbreviations = ["", "seg", "ter", "qua", "qui", "sex", "sab", "dom"] + + +class BrazilianPortugueseLocale(PortugueseLocale): + names = ["pt_br"] + 
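The German-style ``describe()`` override above keeps two sets of timeframes: the dative forms for relative phrases and ``timeframes_only_distance`` for bare durations. A short sketch of the difference, assuming ``get_locale`` from this module:

    from arrow.locales import get_locale

    de = get_locale('de')
    # Relative phrasing uses the dative forms plus the past/future templates.
    print(de.describe('hour', 1))                      # expected: 'in einer Stunde'
    print(de.describe('hour', -1))                     # expected: 'vor einer Stunde'
    # only_distance=True substitutes the bare (nominative-style) form instead.
    print(de.describe('hour', 1, only_distance=True))  # expected: 'eine Stunde'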
+ past = "faz {0}" + + +class TagalogLocale(Locale): + + names = ["tl", "tl_ph"] + + past = "nakaraang {0}" + future = "{0} mula ngayon" + + timeframes = { + "now": "ngayon lang", + "seconds": "segundo", + "minute": "isang minuto", + "minutes": "{0} minuto", + "hour": "isang oras", + "hours": "{0} oras", + "day": "isang araw", + "days": "{0} araw", + "month": "isang buwan", + "months": "{0} buwan", + "year": "isang taon", + "years": "{0} taon", + } + + month_names = [ + "", + "Enero", + "Pebrero", + "Marso", + "Abril", + "Mayo", + "Hunyo", + "Hulyo", + "Agosto", + "Setyembre", + "Oktubre", + "Nobyembre", + "Disyembre", + ] + month_abbreviations = [ + "", + "Ene", + "Peb", + "Mar", + "Abr", + "May", + "Hun", + "Hul", + "Ago", + "Set", + "Okt", + "Nob", + "Dis", + ] + + day_names = [ + "", + "Lunes", + "Martes", + "Miyerkules", + "Huwebes", + "Biyernes", + "Sabado", + "Linggo", + ] + day_abbreviations = ["", "Lun", "Mar", "Miy", "Huw", "Biy", "Sab", "Lin"] + + def _ordinal_number(self, n): + return "ika-{}".format(n) + + +class VietnameseLocale(Locale): + + names = ["vi", "vi_vn"] + + past = "{0} trước" + future = "{0} nữa" + + timeframes = { + "now": "hiện tại", + "seconds": "giây", + "minute": "một phút", + "minutes": "{0} phút", + "hour": "một giờ", + "hours": "{0} giờ", + "day": "một ngày", + "days": "{0} ngày", + "month": "một tháng", + "months": "{0} tháng", + "year": "một năm", + "years": "{0} năm", + } + + month_names = [ + "", + "Tháng Một", + "Tháng Hai", + "Tháng Ba", + "Tháng Tư", + "Tháng Năm", + "Tháng Sáu", + "Tháng Bảy", + "Tháng Tám", + "Tháng Chín", + "Tháng Mười", + "Tháng Mười Một", + "Tháng Mười Hai", + ] + month_abbreviations = [ + "", + "Tháng 1", + "Tháng 2", + "Tháng 3", + "Tháng 4", + "Tháng 5", + "Tháng 6", + "Tháng 7", + "Tháng 8", + "Tháng 9", + "Tháng 10", + "Tháng 11", + "Tháng 12", + ] + + day_names = [ + "", + "Thứ Hai", + "Thứ Ba", + "Thứ Tư", + "Thứ Năm", + "Thứ Sáu", + "Thứ Bảy", + "Chủ Nhật", + ] + day_abbreviations = ["", "Thứ 2", "Thứ 3", "Thứ 4", "Thứ 5", "Thứ 6", "Thứ 7", "CN"] + + +class TurkishLocale(Locale): + + names = ["tr", "tr_tr"] + + past = "{0} önce" + future = "{0} sonra" + + timeframes = { + "now": "şimdi", + "seconds": "saniye", + "minute": "bir dakika", + "minutes": "{0} dakika", + "hour": "bir saat", + "hours": "{0} saat", + "day": "bir gün", + "days": "{0} gün", + "month": "bir ay", + "months": "{0} ay", + "year": "yıl", + "years": "{0} yıl", + } + + month_names = [ + "", + "Ocak", + "Şubat", + "Mart", + "Nisan", + "Mayıs", + "Haziran", + "Temmuz", + "Ağustos", + "Eylül", + "Ekim", + "Kasım", + "Aralık", + ] + month_abbreviations = [ + "", + "Oca", + "Şub", + "Mar", + "Nis", + "May", + "Haz", + "Tem", + "Ağu", + "Eyl", + "Eki", + "Kas", + "Ara", + ] + + day_names = [ + "", + "Pazartesi", + "Salı", + "Çarşamba", + "Perşembe", + "Cuma", + "Cumartesi", + "Pazar", + ] + day_abbreviations = ["", "Pzt", "Sal", "Çar", "Per", "Cum", "Cmt", "Paz"] + + +class AzerbaijaniLocale(Locale): + + names = ["az", "az_az"] + + past = "{0} əvvəl" + future = "{0} sonra" + + timeframes = { + "now": "indi", + "seconds": "saniyə", + "minute": "bir dəqiqə", + "minutes": "{0} dəqiqə", + "hour": "bir saat", + "hours": "{0} saat", + "day": "bir gün", + "days": "{0} gün", + "month": "bir ay", + "months": "{0} ay", + "year": "il", + "years": "{0} il", + } + + month_names = [ + "", + "Yanvar", + "Fevral", + "Mart", + "Aprel", + "May", + "İyun", + "İyul", + "Avqust", + "Sentyabr", + "Oktyabr", + "Noyabr", + "Dekabr", + ] + month_abbreviations = [ + "", + "Yan", + 
"Fev", + "Mar", + "Apr", + "May", + "İyn", + "İyl", + "Avq", + "Sen", + "Okt", + "Noy", + "Dek", + ] + + day_names = [ + "", + "Bazar ertəsi", + "Çərşənbə axşamı", + "Çərşənbə", + "Cümə axşamı", + "Cümə", + "Şənbə", + "Bazar", + ] + day_abbreviations = ["", "Ber", "Çax", "Çər", "Cax", "Cüm", "Şnb", "Bzr"] + + +class ArabicLocale(Locale): + names = [ + "ar", + "ar_ae", + "ar_bh", + "ar_dj", + "ar_eg", + "ar_eh", + "ar_er", + "ar_km", + "ar_kw", + "ar_ly", + "ar_om", + "ar_qa", + "ar_sa", + "ar_sd", + "ar_so", + "ar_ss", + "ar_td", + "ar_ye", + ] + + past = "منذ {0}" + future = "خلال {0}" + + timeframes = { + "now": "الآن", + "seconds": {"double": "ثانيتين", "ten": "{0} ثوان", "higher": "{0} ثانية"}, + "minute": "دقيقة", + "minutes": {"double": "دقيقتين", "ten": "{0} دقائق", "higher": "{0} دقيقة"}, + "hour": "ساعة", + "hours": {"double": "ساعتين", "ten": "{0} ساعات", "higher": "{0} ساعة"}, + "day": "يوم", + "days": {"double": "يومين", "ten": "{0} أيام", "higher": "{0} يوم"}, + "month": "شهر", + "months": {"double": "شهرين", "ten": "{0} أشهر", "higher": "{0} شهر"}, + "year": "سنة", + "years": {"double": "سنتين", "ten": "{0} سنوات", "higher": "{0} سنة"}, + } + + month_names = [ + "", + "يناير", + "فبراير", + "مارس", + "أبريل", + "مايو", + "يونيو", + "يوليو", + "أغسطس", + "سبتمبر", + "أكتوبر", + "نوفمبر", + "ديسمبر", + ] + month_abbreviations = [ + "", + "يناير", + "فبراير", + "مارس", + "أبريل", + "مايو", + "يونيو", + "يوليو", + "أغسطس", + "سبتمبر", + "أكتوبر", + "نوفمبر", + "ديسمبر", + ] + + day_names = [ + "", + "الإثنين", + "الثلاثاء", + "الأربعاء", + "الخميس", + "الجمعة", + "السبت", + "الأحد", + ] + day_abbreviations = ["", "إثنين", "ثلاثاء", "أربعاء", "خميس", "جمعة", "سبت", "أحد"] + + def _format_timeframe(self, timeframe, delta): + form = self.timeframes[timeframe] + delta = abs(delta) + if isinstance(form, dict): + if delta == 2: + form = form["double"] + elif delta > 2 and delta <= 10: + form = form["ten"] + else: + form = form["higher"] + + return form.format(delta) + + +class LevantArabicLocale(ArabicLocale): + names = ["ar_iq", "ar_jo", "ar_lb", "ar_ps", "ar_sy"] + month_names = [ + "", + "كانون الثاني", + "شباط", + "آذار", + "نيسان", + "أيار", + "حزيران", + "تموز", + "آب", + "أيلول", + "تشرين الأول", + "تشرين الثاني", + "كانون الأول", + ] + month_abbreviations = [ + "", + "كانون الثاني", + "شباط", + "آذار", + "نيسان", + "أيار", + "حزيران", + "تموز", + "آب", + "أيلول", + "تشرين الأول", + "تشرين الثاني", + "كانون الأول", + ] + + +class AlgeriaTunisiaArabicLocale(ArabicLocale): + names = ["ar_tn", "ar_dz"] + month_names = [ + "", + "جانفي", + "فيفري", + "مارس", + "أفريل", + "ماي", + "جوان", + "جويلية", + "أوت", + "سبتمبر", + "أكتوبر", + "نوفمبر", + "ديسمبر", + ] + month_abbreviations = [ + "", + "جانفي", + "فيفري", + "مارس", + "أفريل", + "ماي", + "جوان", + "جويلية", + "أوت", + "سبتمبر", + "أكتوبر", + "نوفمبر", + "ديسمبر", + ] + + +class MauritaniaArabicLocale(ArabicLocale): + names = ["ar_mr"] + month_names = [ + "", + "يناير", + "فبراير", + "مارس", + "إبريل", + "مايو", + "يونيو", + "يوليو", + "أغشت", + "شتمبر", + "أكتوبر", + "نوفمبر", + "دجمبر", + ] + month_abbreviations = [ + "", + "يناير", + "فبراير", + "مارس", + "إبريل", + "مايو", + "يونيو", + "يوليو", + "أغشت", + "شتمبر", + "أكتوبر", + "نوفمبر", + "دجمبر", + ] + + +class MoroccoArabicLocale(ArabicLocale): + names = ["ar_ma"] + month_names = [ + "", + "يناير", + "فبراير", + "مارس", + "أبريل", + "ماي", + "يونيو", + "يوليوز", + "غشت", + "شتنبر", + "أكتوبر", + "نونبر", + "دجنبر", + ] + month_abbreviations = [ + "", + "يناير", + 
"فبراير", + "مارس", + "أبريل", + "ماي", + "يونيو", + "يوليوز", + "غشت", + "شتنبر", + "أكتوبر", + "نونبر", + "دجنبر", + ] + + +class IcelandicLocale(Locale): + def _format_timeframe(self, timeframe, delta): + + timeframe = self.timeframes[timeframe] + if delta < 0: + timeframe = timeframe[0] + elif delta > 0: + timeframe = timeframe[1] + + return timeframe.format(abs(delta)) + + names = ["is", "is_is"] + + past = "fyrir {0} síðan" + future = "eftir {0}" + + timeframes = { + "now": "rétt í þessu", + "seconds": ("nokkrum sekúndum", "nokkrar sekúndur"), + "minute": ("einni mínútu", "eina mínútu"), + "minutes": ("{0} mínútum", "{0} mínútur"), + "hour": ("einum tíma", "einn tíma"), + "hours": ("{0} tímum", "{0} tíma"), + "day": ("einum degi", "einn dag"), + "days": ("{0} dögum", "{0} daga"), + "month": ("einum mánuði", "einn mánuð"), + "months": ("{0} mánuðum", "{0} mánuði"), + "year": ("einu ári", "eitt ár"), + "years": ("{0} árum", "{0} ár"), + } + + meridians = {"am": "f.h.", "pm": "e.h.", "AM": "f.h.", "PM": "e.h."} + + month_names = [ + "", + "janúar", + "febrúar", + "mars", + "apríl", + "maí", + "júní", + "júlí", + "ágúst", + "september", + "október", + "nóvember", + "desember", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "maí", + "jún", + "júl", + "ágú", + "sep", + "okt", + "nóv", + "des", + ] + + day_names = [ + "", + "mánudagur", + "þriðjudagur", + "miðvikudagur", + "fimmtudagur", + "föstudagur", + "laugardagur", + "sunnudagur", + ] + day_abbreviations = ["", "mán", "þri", "mið", "fim", "fös", "lau", "sun"] + + +class DanishLocale(Locale): + + names = ["da", "da_dk"] + + past = "for {0} siden" + future = "efter {0}" + + timeframes = { + "now": "lige nu", + "seconds": "et par sekunder", + "minute": "et minut", + "minutes": "{0} minutter", + "hour": "en time", + "hours": "{0} timer", + "day": "en dag", + "days": "{0} dage", + "month": "en måned", + "months": "{0} måneder", + "year": "et år", + "years": "{0} år", + } + + month_names = [ + "", + "januar", + "februar", + "marts", + "april", + "maj", + "juni", + "juli", + "august", + "september", + "oktober", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "maj", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "mandag", + "tirsdag", + "onsdag", + "torsdag", + "fredag", + "lørdag", + "søndag", + ] + day_abbreviations = ["", "man", "tir", "ons", "tor", "fre", "lør", "søn"] + + +class MalayalamLocale(Locale): + + names = ["ml"] + + past = "{0} മുമ്പ്" + future = "{0} ശേഷം" + + timeframes = { + "now": "ഇപ്പോൾ", + "seconds": "സെക്കന്റ്‌", + "minute": "ഒരു മിനിറ്റ്", + "minutes": "{0} മിനിറ്റ്", + "hour": "ഒരു മണിക്കൂർ", + "hours": "{0} മണിക്കൂർ", + "day": "ഒരു ദിവസം ", + "days": "{0} ദിവസം ", + "month": "ഒരു മാസം ", + "months": "{0} മാസം ", + "year": "ഒരു വർഷം ", + "years": "{0} വർഷം ", + } + + meridians = { + "am": "രാവിലെ", + "pm": "ഉച്ചക്ക് ശേഷം", + "AM": "രാവിലെ", + "PM": "ഉച്ചക്ക് ശേഷം", + } + + month_names = [ + "", + "ജനുവരി", + "ഫെബ്രുവരി", + "മാർച്ച്‌", + "ഏപ്രിൽ ", + "മെയ്‌ ", + "ജൂണ്‍", + "ജൂലൈ", + "ഓഗസ്റ്റ്‌", + "സെപ്റ്റംബർ", + "ഒക്ടോബർ", + "നവംബർ", + "ഡിസംബർ", + ] + month_abbreviations = [ + "", + "ജനു", + "ഫെബ് ", + "മാർ", + "ഏപ്രിൽ", + "മേയ്", + "ജൂണ്‍", + "ജൂലൈ", + "ഓഗസ്റ", + "സെപ്റ്റ", + "ഒക്ടോ", + "നവം", + "ഡിസം", + ] + + day_names = ["", "തിങ്കള്‍", "ചൊവ്വ", "ബുധന്‍", "വ്യാഴം", "വെള്ളി", "ശനി", "ഞായര്‍"] + day_abbreviations = [ + "", + "തിങ്കള്‍", + "ചൊവ്വ", + "ബുധന്‍", + "വ്യാഴം", + "വെള്ളി", + "ശനി", 
+ "ഞായര്‍", + ] + + +class HindiLocale(Locale): + + names = ["hi"] + + past = "{0} पहले" + future = "{0} बाद" + + timeframes = { + "now": "अभी", + "seconds": "सेकंड्", + "minute": "एक मिनट ", + "minutes": "{0} मिनट ", + "hour": "एक घंटा", + "hours": "{0} घंटे", + "day": "एक दिन", + "days": "{0} दिन", + "month": "एक माह ", + "months": "{0} महीने ", + "year": "एक वर्ष ", + "years": "{0} साल ", + } + + meridians = {"am": "सुबह", "pm": "शाम", "AM": "सुबह", "PM": "शाम"} + + month_names = [ + "", + "जनवरी", + "फरवरी", + "मार्च", + "अप्रैल ", + "मई", + "जून", + "जुलाई", + "अगस्त", + "सितंबर", + "अक्टूबर", + "नवंबर", + "दिसंबर", + ] + month_abbreviations = [ + "", + "जन", + "फ़र", + "मार्च", + "अप्रै", + "मई", + "जून", + "जुलाई", + "आग", + "सित", + "अकत", + "नवे", + "दिस", + ] + + day_names = [ + "", + "सोमवार", + "मंगलवार", + "बुधवार", + "गुरुवार", + "शुक्रवार", + "शनिवार", + "रविवार", + ] + day_abbreviations = ["", "सोम", "मंगल", "बुध", "गुरुवार", "शुक्र", "शनि", "रवि"] + + +class CzechLocale(Locale): + names = ["cs", "cs_cz"] + + timeframes = { + "now": "Teď", + "seconds": {"past": "{0} sekundami", "future": ["{0} sekundy", "{0} sekund"]}, + "minute": {"past": "minutou", "future": "minutu", "zero": "{0} minut"}, + "minutes": {"past": "{0} minutami", "future": ["{0} minuty", "{0} minut"]}, + "hour": {"past": "hodinou", "future": "hodinu", "zero": "{0} hodin"}, + "hours": {"past": "{0} hodinami", "future": ["{0} hodiny", "{0} hodin"]}, + "day": {"past": "dnem", "future": "den", "zero": "{0} dnů"}, + "days": {"past": "{0} dny", "future": ["{0} dny", "{0} dnů"]}, + "month": {"past": "měsícem", "future": "měsíc", "zero": "{0} měsíců"}, + "months": {"past": "{0} měsíci", "future": ["{0} měsíce", "{0} měsíců"]}, + "year": {"past": "rokem", "future": "rok", "zero": "{0} let"}, + "years": {"past": "{0} lety", "future": ["{0} roky", "{0} let"]}, + } + + past = "Před {0}" + future = "Za {0}" + + month_names = [ + "", + "leden", + "únor", + "březen", + "duben", + "květen", + "červen", + "červenec", + "srpen", + "září", + "říjen", + "listopad", + "prosinec", + ] + month_abbreviations = [ + "", + "led", + "úno", + "bře", + "dub", + "kvě", + "čvn", + "čvc", + "srp", + "zář", + "říj", + "lis", + "pro", + ] + + day_names = [ + "", + "pondělí", + "úterý", + "středa", + "čtvrtek", + "pátek", + "sobota", + "neděle", + ] + day_abbreviations = ["", "po", "út", "st", "čt", "pá", "so", "ne"] + + def _format_timeframe(self, timeframe, delta): + """Czech aware time frame format function, takes into account + the differences between past and future forms.""" + form = self.timeframes[timeframe] + if isinstance(form, dict): + if delta == 0: + form = form["zero"] # And *never* use 0 in the singular! 
+ elif delta > 0: + form = form["future"] + else: + form = form["past"] + delta = abs(delta) + + if isinstance(form, list): + if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): + form = form[0] + else: + form = form[1] + + return form.format(delta) + + +class SlovakLocale(Locale): + names = ["sk", "sk_sk"] + + timeframes = { + "now": "Teraz", + "seconds": {"past": "pár sekundami", "future": ["{0} sekundy", "{0} sekúnd"]}, + "minute": {"past": "minútou", "future": "minútu", "zero": "{0} minút"}, + "minutes": {"past": "{0} minútami", "future": ["{0} minúty", "{0} minút"]}, + "hour": {"past": "hodinou", "future": "hodinu", "zero": "{0} hodín"}, + "hours": {"past": "{0} hodinami", "future": ["{0} hodiny", "{0} hodín"]}, + "day": {"past": "dňom", "future": "deň", "zero": "{0} dní"}, + "days": {"past": "{0} dňami", "future": ["{0} dni", "{0} dní"]}, + "month": {"past": "mesiacom", "future": "mesiac", "zero": "{0} mesiacov"}, + "months": {"past": "{0} mesiacmi", "future": ["{0} mesiace", "{0} mesiacov"]}, + "year": {"past": "rokom", "future": "rok", "zero": "{0} rokov"}, + "years": {"past": "{0} rokmi", "future": ["{0} roky", "{0} rokov"]}, + } + + past = "Pred {0}" + future = "O {0}" + + month_names = [ + "", + "január", + "február", + "marec", + "apríl", + "máj", + "jún", + "júl", + "august", + "september", + "október", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "máj", + "jún", + "júl", + "aug", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "pondelok", + "utorok", + "streda", + "štvrtok", + "piatok", + "sobota", + "nedeľa", + ] + day_abbreviations = ["", "po", "ut", "st", "št", "pi", "so", "ne"] + + def _format_timeframe(self, timeframe, delta): + """Slovak aware time frame format function, takes into account + the differences between past and future forms.""" + form = self.timeframes[timeframe] + if isinstance(form, dict): + if delta == 0: + form = form["zero"] # And *never* use 0 in the singular! 
+ elif delta > 0: + form = form["future"] + else: + form = form["past"] + delta = abs(delta) + + if isinstance(form, list): + if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): + form = form[0] + else: + form = form[1] + + return form.format(delta) + + +class FarsiLocale(Locale): + + names = ["fa", "fa_ir"] + + past = "{0} قبل" + future = "در {0}" + + timeframes = { + "now": "اکنون", + "seconds": "ثانیه", + "minute": "یک دقیقه", + "minutes": "{0} دقیقه", + "hour": "یک ساعت", + "hours": "{0} ساعت", + "day": "یک روز", + "days": "{0} روز", + "month": "یک ماه", + "months": "{0} ماه", + "year": "یک سال", + "years": "{0} سال", + } + + meridians = { + "am": "قبل از ظهر", + "pm": "بعد از ظهر", + "AM": "قبل از ظهر", + "PM": "بعد از ظهر", + } + + month_names = [ + "", + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December", + ] + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", + ] + + day_names = [ + "", + "دو شنبه", + "سه شنبه", + "چهارشنبه", + "پنجشنبه", + "جمعه", + "شنبه", + "یکشنبه", + ] + day_abbreviations = ["", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] + + +class MacedonianLocale(Locale): + names = ["mk", "mk_mk"] + + past = "пред {0}" + future = "за {0}" + + timeframes = { + "now": "сега", + "seconds": "секунди", + "minute": "една минута", + "minutes": "{0} минути", + "hour": "еден саат", + "hours": "{0} саати", + "day": "еден ден", + "days": "{0} дена", + "month": "еден месец", + "months": "{0} месеци", + "year": "една година", + "years": "{0} години", + } + + meridians = {"am": "дп", "pm": "пп", "AM": "претпладне", "PM": "попладне"} + + month_names = [ + "", + "Јануари", + "Февруари", + "Март", + "Април", + "Мај", + "Јуни", + "Јули", + "Август", + "Септември", + "Октомври", + "Ноември", + "Декември", + ] + month_abbreviations = [ + "", + "Јан.", + " Фев.", + " Мар.", + " Апр.", + " Мај", + " Јун.", + " Јул.", + " Авг.", + " Септ.", + " Окт.", + " Ноем.", + " Декем.", + ] + + day_names = [ + "", + "Понеделник", + " Вторник", + " Среда", + " Четврток", + " Петок", + " Сабота", + " Недела", + ] + day_abbreviations = [ + "", + "Пон.", + " Вт.", + " Сре.", + " Чет.", + " Пет.", + " Саб.", + " Нед.", + ] + + +class HebrewLocale(Locale): + + names = ["he", "he_IL"] + + past = "לפני {0}" + future = "בעוד {0}" + + timeframes = { + "now": "הרגע", + "seconds": "שניות", + "minute": "דקה", + "minutes": "{0} דקות", + "hour": "שעה", + "hours": "{0} שעות", + "2-hours": "שעתיים", + "day": "יום", + "days": "{0} ימים", + "2-days": "יומיים", + "month": "חודש", + "months": "{0} חודשים", + "2-months": "חודשיים", + "year": "שנה", + "years": "{0} שנים", + "2-years": "שנתיים", + } + + meridians = { + "am": 'לפנ"צ', + "pm": 'אחר"צ', + "AM": "לפני הצהריים", + "PM": "אחרי הצהריים", + } + + month_names = [ + "", + "ינואר", + "פברואר", + "מרץ", + "אפריל", + "מאי", + "יוני", + "יולי", + "אוגוסט", + "ספטמבר", + "אוקטובר", + "נובמבר", + "דצמבר", + ] + month_abbreviations = [ + "", + "ינו׳", + "פבר׳", + "מרץ", + "אפר׳", + "מאי", + "יוני", + "יולי", + "אוג׳", + "ספט׳", + "אוק׳", + "נוב׳", + "דצמ׳", + ] + + day_names = ["", "שני", "שלישי", "רביעי", "חמישי", "שישי", "שבת", "ראשון"] + day_abbreviations = ["", "ב׳", "ג׳", "ד׳", "ה׳", "ו׳", "ש׳", "א׳"] + + def _format_timeframe(self, timeframe, delta): + """Hebrew couple of aware""" + couple = "2-{}".format(timeframe) + if abs(delta) == 2 and couple in self.timeframes: + return 
self.timeframes[couple].format(abs(delta)) + else: + return self.timeframes[timeframe].format(abs(delta)) + + +class MarathiLocale(Locale): + + names = ["mr"] + + past = "{0} आधी" + future = "{0} नंतर" + + timeframes = { + "now": "सद्य", + "seconds": "सेकंद", + "minute": "एक मिनिट ", + "minutes": "{0} मिनिट ", + "hour": "एक तास", + "hours": "{0} तास", + "day": "एक दिवस", + "days": "{0} दिवस", + "month": "एक महिना ", + "months": "{0} महिने ", + "year": "एक वर्ष ", + "years": "{0} वर्ष ", + } + + meridians = {"am": "सकाळ", "pm": "संध्याकाळ", "AM": "सकाळ", "PM": "संध्याकाळ"} + + month_names = [ + "", + "जानेवारी", + "फेब्रुवारी", + "मार्च", + "एप्रिल", + "मे", + "जून", + "जुलै", + "अॉगस्ट", + "सप्टेंबर", + "अॉक्टोबर", + "नोव्हेंबर", + "डिसेंबर", + ] + month_abbreviations = [ + "", + "जान", + "फेब्रु", + "मार्च", + "एप्रि", + "मे", + "जून", + "जुलै", + "अॉग", + "सप्टें", + "अॉक्टो", + "नोव्हें", + "डिसें", + ] + + day_names = [ + "", + "सोमवार", + "मंगळवार", + "बुधवार", + "गुरुवार", + "शुक्रवार", + "शनिवार", + "रविवार", + ] + day_abbreviations = ["", "सोम", "मंगळ", "बुध", "गुरु", "शुक्र", "शनि", "रवि"] + + +def _map_locales(): + + locales = {} + + for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass): + if issubclass(cls, Locale): + for name in cls.names: + locales[name.lower()] = cls + + return locales + + +class CatalanLocale(Locale): + names = ["ca", "ca_es", "ca_ad", "ca_fr", "ca_it"] + past = "Fa {0}" + future = "En {0}" + + timeframes = { + "now": "Ara mateix", + "seconds": "segons", + "minute": "1 minut", + "minutes": "{0} minuts", + "hour": "una hora", + "hours": "{0} hores", + "day": "un dia", + "days": "{0} dies", + "month": "un mes", + "months": "{0} mesos", + "year": "un any", + "years": "{0} anys", + } + + month_names = [ + "", + "Gener", + "Febrer", + "Març", + "Abril", + "Maig", + "Juny", + "Juliol", + "Agost", + "Setembre", + "Octubre", + "Novembre", + "Desembre", + ] + month_abbreviations = [ + "", + "Gener", + "Febrer", + "Març", + "Abril", + "Maig", + "Juny", + "Juliol", + "Agost", + "Setembre", + "Octubre", + "Novembre", + "Desembre", + ] + day_names = [ + "", + "Dilluns", + "Dimarts", + "Dimecres", + "Dijous", + "Divendres", + "Dissabte", + "Diumenge", + ] + day_abbreviations = [ + "", + "Dilluns", + "Dimarts", + "Dimecres", + "Dijous", + "Divendres", + "Dissabte", + "Diumenge", + ] + + +class BasqueLocale(Locale): + names = ["eu", "eu_eu"] + past = "duela {0}" + future = "{0}" # I don't know what's the right phrase in Basque for the future. 
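``_map_locales()`` above builds the name-to-class registry that ``get_locale()`` consults: it walks this module with ``inspect.getmembers`` and registers every ``Locale`` subclass under each lowercased entry of its ``names`` list. A rough sketch of the lookup behaviour (class names as expected from the definitions above):

    from arrow.locales import get_locale

    print(type(get_locale('pt_br')).__name__)  # expected: 'BrazilianPortugueseLocale'
    print(type(get_locale('DE_AT')).__name__)  # expected: 'AustrianLocale' (lookup lowercases the name)
    # An unknown name raises ValueError("Unsupported locale ...") per get_locale() above.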
+ + timeframes = { + "now": "Orain", + "seconds": "segundu", + "minute": "minutu bat", + "minutes": "{0} minutu", + "hour": "ordu bat", + "hours": "{0} ordu", + "day": "egun bat", + "days": "{0} egun", + "month": "hilabete bat", + "months": "{0} hilabet", + "year": "urte bat", + "years": "{0} urte", + } + + month_names = [ + "", + "urtarrilak", + "otsailak", + "martxoak", + "apirilak", + "maiatzak", + "ekainak", + "uztailak", + "abuztuak", + "irailak", + "urriak", + "azaroak", + "abenduak", + ] + month_abbreviations = [ + "", + "urt", + "ots", + "mar", + "api", + "mai", + "eka", + "uzt", + "abu", + "ira", + "urr", + "aza", + "abe", + ] + day_names = [ + "", + "astelehena", + "asteartea", + "asteazkena", + "osteguna", + "ostirala", + "larunbata", + "igandea", + ] + day_abbreviations = ["", "al", "ar", "az", "og", "ol", "lr", "ig"] + + +class HungarianLocale(Locale): + + names = ["hu", "hu_hu"] + + past = "{0} ezelőtt" + future = "{0} múlva" + + timeframes = { + "now": "éppen most", + "seconds": {"past": "másodpercekkel", "future": "pár másodperc"}, + "minute": {"past": "egy perccel", "future": "egy perc"}, + "minutes": {"past": "{0} perccel", "future": "{0} perc"}, + "hour": {"past": "egy órával", "future": "egy óra"}, + "hours": {"past": "{0} órával", "future": "{0} óra"}, + "day": {"past": "egy nappal", "future": "egy nap"}, + "days": {"past": "{0} nappal", "future": "{0} nap"}, + "month": {"past": "egy hónappal", "future": "egy hónap"}, + "months": {"past": "{0} hónappal", "future": "{0} hónap"}, + "year": {"past": "egy évvel", "future": "egy év"}, + "years": {"past": "{0} évvel", "future": "{0} év"}, + } + + month_names = [ + "", + "január", + "február", + "március", + "április", + "május", + "június", + "július", + "augusztus", + "szeptember", + "október", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "febr", + "márc", + "ápr", + "máj", + "jún", + "júl", + "aug", + "szept", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "hétfő", + "kedd", + "szerda", + "csütörtök", + "péntek", + "szombat", + "vasárnap", + ] + day_abbreviations = ["", "hét", "kedd", "szer", "csüt", "pént", "szom", "vas"] + + meridians = {"am": "de", "pm": "du", "AM": "DE", "PM": "DU"} + + def _format_timeframe(self, timeframe, delta): + form = self.timeframes[timeframe] + + if isinstance(form, dict): + if delta > 0: + form = form["future"] + else: + form = form["past"] + + return form.format(abs(delta)) + + +class EsperantoLocale(Locale): + names = ["eo", "eo_xx"] + past = "antaŭ {0}" + future = "post {0}" + + timeframes = { + "now": "nun", + "seconds": "kelkaj sekundoj", + "minute": "unu minuto", + "minutes": "{0} minutoj", + "hour": "un horo", + "hours": "{0} horoj", + "day": "unu tago", + "days": "{0} tagoj", + "month": "unu monato", + "months": "{0} monatoj", + "year": "unu jaro", + "years": "{0} jaroj", + } + + month_names = [ + "", + "januaro", + "februaro", + "marto", + "aprilo", + "majo", + "junio", + "julio", + "aŭgusto", + "septembro", + "oktobro", + "novembro", + "decembro", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "maj", + "jun", + "jul", + "aŭg", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "lundo", + "mardo", + "merkredo", + "ĵaŭdo", + "vendredo", + "sabato", + "dimanĉo", + ] + day_abbreviations = ["", "lun", "mar", "mer", "ĵaŭ", "ven", "sab", "dim"] + + meridians = {"am": "atm", "pm": "ptm", "AM": "ATM", "PM": "PTM"} + + ordinal_day_re = r"((?P[1-3]?[0-9](?=a))a)" + + def _ordinal_number(self, n): + return 
"{}a".format(n) + + +class ThaiLocale(Locale): + + names = ["th", "th_th"] + + past = "{0}{1}ที่ผ่านมา" + future = "ในอีก{1}{0}" + + timeframes = { + "now": "ขณะนี้", + "seconds": "ไม่กี่วินาที", + "minute": "1 นาที", + "minutes": "{0} นาที", + "hour": "1 ชั่วโมง", + "hours": "{0} ชั่วโมง", + "day": "1 วัน", + "days": "{0} วัน", + "month": "1 เดือน", + "months": "{0} เดือน", + "year": "1 ปี", + "years": "{0} ปี", + } + + month_names = [ + "", + "มกราคม", + "กุมภาพันธ์", + "มีนาคม", + "เมษายน", + "พฤษภาคม", + "มิถุนายน", + "กรกฎาคม", + "สิงหาคม", + "กันยายน", + "ตุลาคม", + "พฤศจิกายน", + "ธันวาคม", + ] + month_abbreviations = [ + "", + "ม.ค.", + "ก.พ.", + "มี.ค.", + "เม.ย.", + "พ.ค.", + "มิ.ย.", + "ก.ค.", + "ส.ค.", + "ก.ย.", + "ต.ค.", + "พ.ย.", + "ธ.ค.", + ] + + day_names = ["", "จันทร์", "อังคาร", "พุธ", "พฤหัสบดี", "ศุกร์", "เสาร์", "อาทิตย์"] + day_abbreviations = ["", "จ", "อ", "พ", "พฤ", "ศ", "ส", "อา"] + + meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} + + BE_OFFSET = 543 + + def year_full(self, year): + """Thai always use Buddhist Era (BE) which is CE + 543""" + year += self.BE_OFFSET + return "{:04d}".format(year) + + def year_abbreviation(self, year): + """Thai always use Buddhist Era (BE) which is CE + 543""" + year += self.BE_OFFSET + return "{:04d}".format(year)[2:] + + def _format_relative(self, humanized, timeframe, delta): + """Thai normally doesn't have any space between words""" + if timeframe == "now": + return humanized + space = "" if timeframe == "seconds" else " " + direction = self.past if delta < 0 else self.future + + return direction.format(humanized, space) + + +class BengaliLocale(Locale): + + names = ["bn", "bn_bd", "bn_in"] + + past = "{0} আগে" + future = "{0} পরে" + + timeframes = { + "now": "এখন", + "seconds": "সেকেন্ড", + "minute": "এক মিনিট", + "minutes": "{0} মিনিট", + "hour": "এক ঘণ্টা", + "hours": "{0} ঘণ্টা", + "day": "এক দিন", + "days": "{0} দিন", + "month": "এক মাস", + "months": "{0} মাস ", + "year": "এক বছর", + "years": "{0} বছর", + } + + meridians = {"am": "সকাল", "pm": "বিকাল", "AM": "সকাল", "PM": "বিকাল"} + + month_names = [ + "", + "জানুয়ারি", + "ফেব্রুয়ারি", + "মার্চ", + "এপ্রিল", + "মে", + "জুন", + "জুলাই", + "আগস্ট", + "সেপ্টেম্বর", + "অক্টোবর", + "নভেম্বর", + "ডিসেম্বর", + ] + month_abbreviations = [ + "", + "জানু", + "ফেব", + "মার্চ", + "এপ্রি", + "মে", + "জুন", + "জুল", + "অগা", + "সেপ্ট", + "অক্টো", + "নভে", + "ডিসে", + ] + + day_names = [ + "", + "সোমবার", + "মঙ্গলবার", + "বুধবার", + "বৃহস্পতিবার", + "শুক্রবার", + "শনিবার", + "রবিবার", + ] + day_abbreviations = ["", "সোম", "মঙ্গল", "বুধ", "বৃহঃ", "শুক্র", "শনি", "রবি"] + + def _ordinal_number(self, n): + if n > 10 or n == 0: + return "{}তম".format(n) + if n in [1, 5, 7, 8, 9, 10]: + return "{}ম".format(n) + if n in [2, 3]: + return "{}য়".format(n) + if n == 4: + return "{}র্থ".format(n) + if n == 6: + return "{}ষ্ঠ".format(n) + + +class RomanshLocale(Locale): + + names = ["rm", "rm_ch"] + + past = "avant {0}" + future = "en {0}" + + timeframes = { + "now": "en quest mument", + "seconds": "secundas", + "minute": "ina minuta", + "minutes": "{0} minutas", + "hour": "in'ura", + "hours": "{0} ura", + "day": "in di", + "days": "{0} dis", + "month": "in mais", + "months": "{0} mais", + "year": "in onn", + "years": "{0} onns", + } + + month_names = [ + "", + "schaner", + "favrer", + "mars", + "avrigl", + "matg", + "zercladur", + "fanadur", + "avust", + "settember", + "october", + "november", + "december", + ] + + month_abbreviations = [ + "", + "schan", + "fav", + "mars", + 
"avr", + "matg", + "zer", + "fan", + "avu", + "set", + "oct", + "nov", + "dec", + ] + + day_names = [ + "", + "glindesdi", + "mardi", + "mesemna", + "gievgia", + "venderdi", + "sonda", + "dumengia", + ] + + day_abbreviations = ["", "gli", "ma", "me", "gie", "ve", "so", "du"] + + +class SwissLocale(Locale): + + names = ["de", "de_ch"] + + past = "vor {0}" + future = "in {0}" + + timeframes = { + "now": "gerade eben", + "seconds": "Sekunden", + "minute": "einer Minute", + "minutes": "{0} Minuten", + "hour": "einer Stunde", + "hours": "{0} Stunden", + "day": "einem Tag", + "days": "{0} Tagen", + "month": "einem Monat", + "months": "{0} Monaten", + "year": "einem Jahr", + "years": "{0} Jahren", + } + + month_names = [ + "", + "Januar", + "Februar", + "März", + "April", + "Mai", + "Juni", + "Juli", + "August", + "September", + "Oktober", + "November", + "Dezember", + ] + + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mär", + "Apr", + "Mai", + "Jun", + "Jul", + "Aug", + "Sep", + "Okt", + "Nov", + "Dez", + ] + + day_names = [ + "", + "Montag", + "Dienstag", + "Mittwoch", + "Donnerstag", + "Freitag", + "Samstag", + "Sonntag", + ] + + day_abbreviations = ["", "Mo", "Di", "Mi", "Do", "Fr", "Sa", "So"] + + +class RomanianLocale(Locale): + names = ["ro", "ro_ro"] + + past = "{0} în urmă" + future = "peste {0}" + + timeframes = { + "now": "acum", + "seconds": "câteva secunde", + "minute": "un minut", + "minutes": "{0} minute", + "hour": "o oră", + "hours": "{0} ore", + "day": "o zi", + "days": "{0} zile", + "month": "o lună", + "months": "{0} luni", + "year": "un an", + "years": "{0} ani", + } + + month_names = [ + "", + "ianuarie", + "februarie", + "martie", + "aprilie", + "mai", + "iunie", + "iulie", + "august", + "septembrie", + "octombrie", + "noiembrie", + "decembrie", + ] + month_abbreviations = [ + "", + "ian", + "febr", + "mart", + "apr", + "mai", + "iun", + "iul", + "aug", + "sept", + "oct", + "nov", + "dec", + ] + + day_names = [ + "", + "luni", + "marți", + "miercuri", + "joi", + "vineri", + "sâmbătă", + "duminică", + ] + day_abbreviations = ["", "Lun", "Mar", "Mie", "Joi", "Vin", "Sâm", "Dum"] + + +class SlovenianLocale(Locale): + names = ["sl", "sl_si"] + + past = "pred {0}" + future = "čez {0}" + + timeframes = { + "now": "zdaj", + "seconds": "sekund", + "minute": "minuta", + "minutes": "{0} minutami", + "hour": "uro", + "hours": "{0} ur", + "day": "dan", + "days": "{0} dni", + "month": "mesec", + "months": "{0} mesecev", + "year": "leto", + "years": "{0} let", + } + + meridians = {"am": "", "pm": "", "AM": "", "PM": ""} + + month_names = [ + "", + "Januar", + "Februar", + "Marec", + "April", + "Maj", + "Junij", + "Julij", + "Avgust", + "September", + "Oktober", + "November", + "December", + ] + + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mar", + "Apr", + "Maj", + "Jun", + "Jul", + "Avg", + "Sep", + "Okt", + "Nov", + "Dec", + ] + + day_names = [ + "", + "Ponedeljek", + "Torek", + "Sreda", + "Četrtek", + "Petek", + "Sobota", + "Nedelja", + ] + + day_abbreviations = ["", "Pon", "Tor", "Sre", "Čet", "Pet", "Sob", "Ned"] + + +class IndonesianLocale(Locale): + + names = ["id", "id_id"] + + past = "{0} yang lalu" + future = "dalam {0}" + + timeframes = { + "now": "baru saja", + "seconds": "detik", + "minute": "1 menit", + "minutes": "{0} menit", + "hour": "1 jam", + "hours": "{0} jam", + "day": "1 hari", + "days": "{0} hari", + "month": "1 bulan", + "months": "{0} bulan", + "year": "1 tahun", + "years": "{0} tahun", + } + + meridians = {"am": "", "pm": "", "AM": "", "PM": ""} + + 
month_names = [ + "", + "Januari", + "Februari", + "Maret", + "April", + "Mei", + "Juni", + "Juli", + "Agustus", + "September", + "Oktober", + "November", + "Desember", + ] + + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mar", + "Apr", + "Mei", + "Jun", + "Jul", + "Ags", + "Sept", + "Okt", + "Nov", + "Des", + ] + + day_names = ["", "Senin", "Selasa", "Rabu", "Kamis", "Jumat", "Sabtu", "Minggu"] + + day_abbreviations = [ + "", + "Senin", + "Selasa", + "Rabu", + "Kamis", + "Jumat", + "Sabtu", + "Minggu", + ] + + +class NepaliLocale(Locale): + names = ["ne", "ne_np"] + + past = "{0} पहिले" + future = "{0} पछी" + + timeframes = { + "now": "अहिले", + "seconds": "सेकण्ड", + "minute": "मिनेट", + "minutes": "{0} मिनेट", + "hour": "एक घण्टा", + "hours": "{0} घण्टा", + "day": "एक दिन", + "days": "{0} दिन", + "month": "एक महिना", + "months": "{0} महिना", + "year": "एक बर्ष", + "years": "बर्ष", + } + + meridians = {"am": "पूर्वाह्न", "pm": "अपरान्ह", "AM": "पूर्वाह्न", "PM": "अपरान्ह"} + + month_names = [ + "", + "जनवरी", + "फेब्रुअरी", + "मार्च", + "एप्रील", + "मे", + "जुन", + "जुलाई", + "अगष्ट", + "सेप्टेम्बर", + "अक्टोबर", + "नोवेम्बर", + "डिसेम्बर", + ] + month_abbreviations = [ + "", + "जन", + "फेब", + "मार्च", + "एप्रील", + "मे", + "जुन", + "जुलाई", + "अग", + "सेप", + "अक्ट", + "नोव", + "डिस", + ] + + day_names = [ + "", + "सोमवार", + "मंगलवार", + "बुधवार", + "बिहिवार", + "शुक्रवार", + "शनिवार", + "आइतवार", + ] + + day_abbreviations = ["", "सोम", "मंगल", "बुध", "बिहि", "शुक्र", "शनि", "आइत"] + + +class EstonianLocale(Locale): + names = ["ee", "et"] + + past = "{0} tagasi" + future = "{0} pärast" + + timeframes = { + "now": {"past": "just nüüd", "future": "just nüüd"}, + "second": {"past": "üks sekund", "future": "ühe sekundi"}, + "seconds": {"past": "{0} sekundit", "future": "{0} sekundi"}, + "minute": {"past": "üks minut", "future": "ühe minuti"}, + "minutes": {"past": "{0} minutit", "future": "{0} minuti"}, + "hour": {"past": "tund aega", "future": "tunni aja"}, + "hours": {"past": "{0} tundi", "future": "{0} tunni"}, + "day": {"past": "üks päev", "future": "ühe päeva"}, + "days": {"past": "{0} päeva", "future": "{0} päeva"}, + "month": {"past": "üks kuu", "future": "ühe kuu"}, + "months": {"past": "{0} kuud", "future": "{0} kuu"}, + "year": {"past": "üks aasta", "future": "ühe aasta"}, + "years": {"past": "{0} aastat", "future": "{0} aasta"}, + } + + month_names = [ + "", + "Jaanuar", + "Veebruar", + "Märts", + "Aprill", + "Mai", + "Juuni", + "Juuli", + "August", + "September", + "Oktoober", + "November", + "Detsember", + ] + month_abbreviations = [ + "", + "Jan", + "Veb", + "Mär", + "Apr", + "Mai", + "Jun", + "Jul", + "Aug", + "Sep", + "Okt", + "Nov", + "Dets", + ] + + day_names = [ + "", + "Esmaspäev", + "Teisipäev", + "Kolmapäev", + "Neljapäev", + "Reede", + "Laupäev", + "Pühapäev", + ] + day_abbreviations = ["", "Esm", "Teis", "Kolm", "Nelj", "Re", "Lau", "Püh"] + + def _format_timeframe(self, timeframe, delta): + form = self.timeframes[timeframe] + if delta > 0: + form = form["future"] + else: + form = form["past"] + return form.format(abs(delta)) + + +_locales = _map_locales() diff --git a/py3/lib/python3.6/site-packages/arrow/parser.py b/py3/lib/python3.6/site-packages/arrow/parser.py new file mode 100644 index 0000000..7266ff9 --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow/parser.py @@ -0,0 +1,364 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import re +from datetime import datetime + +from dateutil import tz + +from arrow 
import locales + +try: + from functools import lru_cache +except ImportError: # pragma: no cover + from backports.functools_lru_cache import lru_cache # pragma: no cover + + +class ParserError(RuntimeError): + pass + + +class DateTimeParser(object): + + _FORMAT_RE = re.compile( + r"(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?d?d?d|HH?|hh?|mm?|ss?|S+|ZZ?Z?|a|A|X)" + ) + _ESCAPE_RE = re.compile(r"\[[^\[\]]*\]") + + _ONE_OR_MORE_DIGIT_RE = re.compile(r"\d+") + _ONE_OR_TWO_DIGIT_RE = re.compile(r"\d{1,2}") + _FOUR_DIGIT_RE = re.compile(r"\d{4}") + _TWO_DIGIT_RE = re.compile(r"\d{2}") + _TZ_RE = re.compile(r"[+\-]?\d{2}:?(\d{2})?") + _TZ_NAME_RE = re.compile(r"\w[\w+\-/]+") + + _BASE_INPUT_RE_MAP = { + "YYYY": _FOUR_DIGIT_RE, + "YY": _TWO_DIGIT_RE, + "MM": _TWO_DIGIT_RE, + "M": _ONE_OR_TWO_DIGIT_RE, + "DD": _TWO_DIGIT_RE, + "D": _ONE_OR_TWO_DIGIT_RE, + "HH": _TWO_DIGIT_RE, + "H": _ONE_OR_TWO_DIGIT_RE, + "hh": _TWO_DIGIT_RE, + "h": _ONE_OR_TWO_DIGIT_RE, + "mm": _TWO_DIGIT_RE, + "m": _ONE_OR_TWO_DIGIT_RE, + "ss": _TWO_DIGIT_RE, + "s": _ONE_OR_TWO_DIGIT_RE, + "X": re.compile(r"\d+"), + "ZZZ": _TZ_NAME_RE, + "ZZ": _TZ_RE, + "Z": _TZ_RE, + "S": _ONE_OR_MORE_DIGIT_RE, + } + + MARKERS = ["YYYY", "MM", "DD"] + SEPARATORS = ["-", "/", "."] + + def __init__(self, locale="en_us", cache_size=0): + + self.locale = locales.get_locale(locale) + self._input_re_map = self._BASE_INPUT_RE_MAP.copy() + self._input_re_map.update( + { + "MMMM": self._choice_re(self.locale.month_names[1:], re.IGNORECASE), + "MMM": self._choice_re( + self.locale.month_abbreviations[1:], re.IGNORECASE + ), + "Do": re.compile(self.locale.ordinal_day_re), + "dddd": self._choice_re(self.locale.day_names[1:], re.IGNORECASE), + "ddd": self._choice_re( + self.locale.day_abbreviations[1:], re.IGNORECASE + ), + "d": re.compile(r"[1-7]"), + "a": self._choice_re( + (self.locale.meridians["am"], self.locale.meridians["pm"]) + ), + # note: 'A' token accepts both 'am/pm' and 'AM/PM' formats to + # ensure backwards compatibility of this token + "A": self._choice_re(self.locale.meridians.values()), + } + ) + if cache_size > 0: + self._generate_pattern_re = lru_cache(maxsize=cache_size)( + self._generate_pattern_re + ) + + def parse_iso(self, string): + + has_time = "T" in string or " " in string.strip() + space_divider = " " in string.strip() + + if has_time: + if space_divider: + date_string, time_string = string.split(" ", 1) + else: + date_string, time_string = string.split("T", 1) + time_parts = re.split("[+-]", time_string, 1) + has_tz = len(time_parts) > 1 + has_seconds = time_parts[0].count(":") > 1 + has_subseconds = re.search("[.,]", time_parts[0]) + + if has_subseconds: + formats = ["YYYY-MM-DDTHH:mm:ss%sS" % has_subseconds.group()] + elif has_seconds: + formats = ["YYYY-MM-DDTHH:mm:ss"] + else: + formats = ["YYYY-MM-DDTHH:mm"] + else: + has_tz = False + # generate required formats: YYYY-MM-DD, YYYY-MM-DD, YYYY + # using various separators: -, /, . + len_markers = len(self.MARKERS) + formats = [ + separator.join(self.MARKERS[: len_markers - i]) + for i in range(len_markers) + for separator in self.SEPARATORS + ] + + if has_time and has_tz: + formats = [f + "Z" for f in formats] + + if space_divider: + formats = [item.replace("T", " ", 1) for item in formats] + + return self._parse_multiformat(string, formats) + + def _generate_pattern_re(self, fmt): + + # fmt is a string of tokens like 'YYYY-MM-DD' + # we construct a new string by replacing each + # token by its pattern: + # 'YYYY-MM-DD' -> '(?P\d{4})-(?P\d{2})-(?P
\d{2})' + tokens = [] + offset = 0 + + # Escape all special RegEx chars + escaped_fmt = re.escape(fmt) + + # Extract the bracketed expressions to be reinserted later. + escaped_fmt = re.sub(self._ESCAPE_RE, "#", escaped_fmt) + # Any number of S is the same as one. + escaped_fmt = re.sub("S+", "S", escaped_fmt) + escaped_data = re.findall(self._ESCAPE_RE, fmt) + + fmt_pattern = escaped_fmt + + for m in self._FORMAT_RE.finditer(escaped_fmt): + token = m.group(0) + try: + input_re = self._input_re_map[token] + except KeyError: + raise ParserError("Unrecognized token '{}'".format(token)) + input_pattern = "(?P<{}>{})".format(token, input_re.pattern) + tokens.append(token) + # a pattern doesn't have the same length as the token + # it replaces! We keep the difference in the offset variable. + # This works because the string is scanned left-to-right and matches + # are returned in the order found by finditer. + fmt_pattern = ( + fmt_pattern[: m.start() + offset] + + input_pattern + + fmt_pattern[m.end() + offset :] + ) + offset += len(input_pattern) - (m.end() - m.start()) + + final_fmt_pattern = "" + a = fmt_pattern.split(r"\#") + b = escaped_data + + # Due to the way Python splits, 'a' will always be longer + for i in range(len(a)): + final_fmt_pattern += a[i] + if i < len(b): + final_fmt_pattern += b[i][1:-1] + + return tokens, re.compile(final_fmt_pattern, flags=re.IGNORECASE) + + def parse(self, string, fmt): + + if isinstance(fmt, list): + return self._parse_multiformat(string, fmt) + + fmt_tokens, fmt_pattern_re = self._generate_pattern_re(fmt) + + match = fmt_pattern_re.search(string) + if match is None: + raise ParserError( + "Failed to match '{}' when parsing '{}'".format( + fmt_pattern_re.pattern, string + ) + ) + parts = {} + for token in fmt_tokens: + if token == "Do": + value = match.group("value") + else: + value = match.group(token) + self._parse_token(token, value, parts) + return self._build_datetime(parts) + + def _parse_token(self, token, value, parts): + + if token == "YYYY": + parts["year"] = int(value) + elif token == "YY": + value = int(value) + parts["year"] = 1900 + value if value > 68 else 2000 + value + + elif token in ["MMMM", "MMM"]: + parts["month"] = self.locale.month_number(value.lower()) + + elif token in ["MM", "M"]: + parts["month"] = int(value) + + elif token in ["DD", "D"]: + parts["day"] = int(value) + + elif token in ["Do"]: + parts["day"] = int(value) + + elif token.upper() in ["HH", "H"]: + parts["hour"] = int(value) + + elif token in ["mm", "m"]: + parts["minute"] = int(value) + + elif token in ["ss", "s"]: + parts["second"] = int(value) + + elif token == "S": + # We have the *most significant* digits of an arbitrary-precision integer. + # We want the six most significant digits as an integer, rounded. + # FIXME: add nanosecond support somehow? 
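            # A worked example of the half-to-even rounding implemented below
            # (illustrative only): a captured value of "1234565" keeps
            # microsecond == 123456 because the seventh digit is 5 and the
            # sixth digit (6) is even; "1234575" rounds up to 123458; and a
            # short capture such as "8987" is right-padded to "8987000"
            # first, giving 898700.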
+ value = value.ljust(7, str("0")) + + # floating-point (IEEE-754) defaults to half-to-even rounding + seventh_digit = int(value[6]) + if seventh_digit == 5: + rounding = int(value[5]) % 2 + elif seventh_digit > 5: + rounding = 1 + else: + rounding = 0 + + parts["microsecond"] = int(value[:6]) + rounding + + elif token == "X": + parts["timestamp"] = int(value) + + elif token in ["ZZZ", "ZZ", "Z"]: + parts["tzinfo"] = TzinfoParser.parse(value) + + elif token in ["a", "A"]: + if value in (self.locale.meridians["am"], self.locale.meridians["AM"]): + parts["am_pm"] = "am" + elif value in (self.locale.meridians["pm"], self.locale.meridians["PM"]): + parts["am_pm"] = "pm" + + @staticmethod + def _build_datetime(parts): + + timestamp = parts.get("timestamp") + + if timestamp: + tz_utc = tz.tzutc() + return datetime.fromtimestamp(timestamp, tz=tz_utc) + + am_pm = parts.get("am_pm") + hour = parts.get("hour", 0) + + if am_pm == "pm" and hour < 12: + hour += 12 + elif am_pm == "am" and hour == 12: + hour = 0 + + return datetime( + year=parts.get("year", 1), + month=parts.get("month", 1), + day=parts.get("day", 1), + hour=hour, + minute=parts.get("minute", 0), + second=parts.get("second", 0), + microsecond=parts.get("microsecond", 0), + tzinfo=parts.get("tzinfo"), + ) + + def _parse_multiformat(self, string, formats): + + _datetime = None + + for fmt in formats: + try: + _datetime = self.parse(string, fmt) + break + except ParserError: + pass + + if _datetime is None: + raise ParserError( + "Could not match input to any of {} on '{}'".format(formats, string) + ) + + return _datetime + + @staticmethod + def _map_lookup(input_map, key): + + try: + return input_map[key] + except KeyError: + raise ParserError('Could not match "{}" to {}'.format(key, input_map)) + + @staticmethod + def _try_timestamp(string): + + try: + return float(string) + except Exception: + return None + + @staticmethod + def _choice_re(choices, flags=0): + return re.compile(r"({})".format("|".join(choices)), flags=flags) + + +class TzinfoParser(object): + + _TZINFO_RE = re.compile(r"([+\-])?(\d\d):?(\d\d)?") + + @classmethod + def parse(cls, string): + + tzinfo = None + + if string == "local": + tzinfo = tz.tzlocal() + + elif string in ["utc", "UTC"]: + tzinfo = tz.tzutc() + + else: + + iso_match = cls._TZINFO_RE.match(string) + + if iso_match: + sign, hours, minutes = iso_match.groups() + if minutes is None: + minutes = 0 + seconds = int(hours) * 3600 + int(minutes) * 60 + + if sign == "-": + seconds *= -1 + + tzinfo = tz.tzoffset(None, seconds) + + else: + tzinfo = tz.gettz(string) + + if tzinfo is None: + raise ParserError('Could not parse timezone expression "{}"'.format(string)) + + return tzinfo diff --git a/py3/lib/python3.6/site-packages/arrow/util.py b/py3/lib/python3.6/site-packages/arrow/util.py new file mode 100644 index 0000000..03132f7 --- /dev/null +++ b/py3/lib/python3.6/site-packages/arrow/util.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +import sys +import warnings + + +def total_seconds(td): # pragma: no cover + return td.total_seconds() + + +def is_timestamp(value): + if type(value) == bool: + return False + try: + float(value) + return True + except Exception: + return False + + +# Python 2.7 / 3.0+ definitions for isstr function. 
+ +try: # pragma: no cover + basestring + + def isstr(s): + return isinstance(s, basestring) # noqa: F821 + + +except NameError: # pragma: no cover + + def isstr(s): + return isinstance(s, str) + + +class list_to_iter_shim(list): + """ A temporary shim for functions that currently return a list but that will, after a + deprecation period, return an iteratator. + """ + + def __init__(self, iterable=(), **kwargs): + """ Equivalent to list(iterable). warn_text will be emitted on all non-iterator operations. + """ + self._warn_text = ( + kwargs.pop("warn_text", None) + or "this object will be converted to an iterator in a future release" + ) + self._iter_count = 0 + list.__init__(self, iterable, **kwargs) + + def _warn(self): + warnings.warn(self._warn_text, DeprecationWarning) + + def __iter__(self): + self._iter_count += 1 + if self._iter_count > 1: + self._warn() + return list.__iter__(self) + + def _wrap_method(name): + list_func = getattr(list, name) + + def wrapper(self, *args, **kwargs): + self._warn() + return list_func(self, *args, **kwargs) + + return wrapper + + __contains__ = _wrap_method("__contains__") + __add__ = _wrap_method("__add__") + __mul__ = _wrap_method("__mul__") + __getitem__ = _wrap_method("__getitem__") + # Ideally, we would throw warnings from __len__, but list(x) calls len(x) + index = _wrap_method("index") + count = _wrap_method("count") + __setitem__ = _wrap_method("__setitem__") + __delitem__ = _wrap_method("__delitem__") + append = _wrap_method("append") + if sys.version_info.major >= 3: # pragma: no cover + clear = _wrap_method("clear") + copy = _wrap_method("copy") + extend = _wrap_method("extend") + __iadd__ = _wrap_method("__iadd__") + __imul__ = _wrap_method("__imul__") + insert = _wrap_method("insert") + pop = _wrap_method("pop") + remove = _wrap_method("remove") + reverse = _wrap_method("reverse") + sort = _wrap_method("sort") + + del _wrap_method + + +__all__ = ["total_seconds", "is_timestamp", "isstr", "list_to_iter_shim"] diff --git a/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/AUTHORS b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/AUTHORS new file mode 100644 index 0000000..ee0dd62 --- /dev/null +++ b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/AUTHORS @@ -0,0 +1,8 @@ +Blinker was originally written by Jason Kirtland. + +Contributors are: + +- Jason Kirtland + +Blinker includes code from Louie, by Patrick K. O'Brien, Mike +C. Fletcher, and Matthew R. Scott. 
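
The list_to_iter_shim defined in util.py above acts like a plain list on a first pass, but emits its DeprecationWarning on list-only operations and on any iteration after the first, nudging callers toward treating the value as an iterator. A minimal usage sketch, assuming the vendored arrow.util module is importable (the warn_text string here is made up for the example):

    import warnings

    from arrow.util import list_to_iter_shim

    shim = list_to_iter_shim([1, 2, 3], warn_text="will become an iterator")

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        list(shim)   # first iteration: no warning
        shim[0]      # indexing is a list-only operation: warns
        sum(shim)    # second iteration: warns

    print(len(caught))  # -> 2
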
diff --git a/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/LICENSE b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/LICENSE new file mode 100644 index 0000000..88f6e84 --- /dev/null +++ b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) The Blinker authors and contributors + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/METADATA b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/METADATA new file mode 100644 index 0000000..25cb9f2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/METADATA @@ -0,0 +1,103 @@ +Metadata-Version: 2.1 +Name: blinker +Version: 1.4 +Summary: Fast, simple object-to-object and broadcast signaling +Home-page: http://pythonhosted.org/blinker/ +Author: Jason Kirtland +Author-email: jek@discorporate.us +License: MIT License +Keywords: signal emit events broadcast +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.4 +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.0 +Classifier: Programming Language :: Python :: 3.1 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities + +[![Build Status](https://travis-ci.org/jek/blinker.svg?branch=master)](https://travis-ci.org/jek/blinker) + + +# Blinker + +Blinker provides a fast dispatching system that allows any number of +interested parties to subscribe to events, or "signals". + +Signal receivers can subscribe to specific senders or receive signals +sent by any sender. 
+ + >>> from blinker import signal + >>> started = signal('round-started') + >>> def each(round): + ... print "Round %s!" % round + ... + >>> started.connect(each) + + >>> def round_two(round): + ... print "This is round two." + ... + >>> started.connect(round_two, sender=2) + + >>> for round in range(1, 4): + ... started.send(round) + ... + Round 1! + Round 2! + This is round two. + Round 3! + +See the [Blinker documentation](https://pythonhosted.org/blinker/) for more information. + +## Requirements + +Blinker requires Python 2.4 or higher, Python 3.0 or higher, or Jython 2.5 or higher. + +## Changelog Summary + +1.3 (July 3, 2013) + + - The global signal stash behind blinker.signal() is now backed by a + regular name-to-Signal dictionary. Previously, weak references were + held in the mapping and ephemeral usage in code like + ``signal('foo').connect(...)`` could have surprising program behavior + depending on import order of modules. + - blinker.Namespace is now built on a regular dict. Use + blinker.WeakNamespace for the older, weak-referencing behavior. + - Signal.connect('text-sender') uses an alternate hashing strategy to + avoid sharp edges in text identity. + +1.2 (October 26, 2011) + + - Added Signal.receiver_connected and Signal.receiver_disconnected + per-Signal signals. + - Deprecated the global 'receiver_connected' signal. + - Verified Python 3.2 support (no changes needed!) + +1.1 (July 21, 2010) + + - Added ``@signal.connect_via(sender)`` decorator + - Added ``signal.connected_to`` shorthand name for the + ``temporarily_connected_to`` context manager. + +1.0 (March 28, 2010) + + - Python 3.x compatibility + +0.9 (February 26, 2010) + + - Sphinx docs, project website + - Added ``with a_signal.temporarily_connected_to(receiver): ...`` support + + diff --git a/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/RECORD b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/RECORD new file mode 100644 index 0000000..24e5522 --- /dev/null +++ b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/RECORD @@ -0,0 +1,15 @@ +blinker/__init__.py,sha256=vgkMDX61C3h5jhFvxiGJ_FCkajPLZi-KzZr_AAfV-_0,300 +blinker/_saferef.py,sha256=OprBfoWWJd791Qm9OY5c-v1bIo2qLalHah8hqAA18Sw,9223 +blinker/_utilities.py,sha256=ev0IgdfH4wMSmLO-J3gVZSc-umhhmJiQJWdQNy6lyTY,4457 +blinker/base.py,sha256=MJ_qn4ladUGMAU2M1k2OIPfgUi_INhbqfLrvjdsmHVM,16319 +blinker-1.4.dist-info/AUTHORS,sha256=f2tPqeT2VyCL9D9YelVKScG65gDrGzbMES0zp7b0p48,207 +blinker-1.4.dist-info/LICENSE,sha256=7pmpCeGwT1Pz1JOD5kYromjPSTiLs6ZXRV2snjjN8Oo,1094 +blinker-1.4.dist-info/METADATA,sha256=xjofN2QBb0zejTtGF8S000R3eRXw5uGzdG15LjEy3ak,3308 +blinker-1.4.dist-info/WHEEL,sha256=MTT8nte0XKevE9yJl-TwoUKDw_i34GiAL_IxFfS6kr8,93 +blinker-1.4.dist-info/top_level.txt,sha256=2NmsENM0J2t9Z8mkjxHDmGMQj7Bm8f5ZTTYe1x1fZtM,8 +blinker-1.4.dist-info/RECORD,, +blinker-1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +blinker/__pycache__/base.cpython-36.pyc,, +blinker/__pycache__/_saferef.cpython-36.pyc,, +blinker/__pycache__/_utilities.cpython-36.pyc,, +blinker/__pycache__/__init__.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/WHEEL b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/WHEEL new file mode 100644 index 0000000..5b3c6fb --- /dev/null +++ b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.3) +Root-Is-Purelib: true +Tag: cp36-none-any + diff --git 
a/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/top_level.txt new file mode 100644 index 0000000..1ff4ca5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/blinker-1.4.dist-info/top_level.txt @@ -0,0 +1 @@ +blinker diff --git a/py3/lib/python3.6/site-packages/blinker/__init__.py b/py3/lib/python3.6/site-packages/blinker/__init__.py new file mode 100644 index 0000000..3ea239c --- /dev/null +++ b/py3/lib/python3.6/site-packages/blinker/__init__.py @@ -0,0 +1,22 @@ +from blinker.base import ( + ANY, + NamedSignal, + Namespace, + Signal, + WeakNamespace, + receiver_connected, + signal, +) + +__all__ = [ + 'ANY', + 'NamedSignal', + 'Namespace', + 'Signal', + 'WeakNamespace', + 'receiver_connected', + 'signal', + ] + + +__version__ = '1.4' diff --git a/py3/lib/python3.6/site-packages/blinker/_saferef.py b/py3/lib/python3.6/site-packages/blinker/_saferef.py new file mode 100644 index 0000000..269e362 --- /dev/null +++ b/py3/lib/python3.6/site-packages/blinker/_saferef.py @@ -0,0 +1,234 @@ +# extracted from Louie, http://pylouie.org/ +# updated for Python 3 +# +# Copyright (c) 2006 Patrick K. O'Brien, Mike C. Fletcher, +# Matthew R. Scott +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# * Neither the name of the nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +"""Refactored 'safe reference from dispatcher.py""" + +import operator +import sys +import traceback +import weakref + + +try: + callable +except NameError: + def callable(object): + return hasattr(object, '__call__') + + +if sys.version_info < (3,): + get_self = operator.attrgetter('im_self') + get_func = operator.attrgetter('im_func') +else: + get_self = operator.attrgetter('__self__') + get_func = operator.attrgetter('__func__') + + +def safe_ref(target, on_delete=None): + """Return a *safe* weak reference to a callable target. + + - ``target``: The object to be weakly referenced, if it's a bound + method reference, will create a BoundMethodWeakref, otherwise + creates a simple weakref. 
+ + - ``on_delete``: If provided, will have a hard reference stored to + the callable to be called after the safe reference goes out of + scope with the reference object, (either a weakref or a + BoundMethodWeakref) as argument. + """ + try: + im_self = get_self(target) + except AttributeError: + if callable(on_delete): + return weakref.ref(target, on_delete) + else: + return weakref.ref(target) + else: + if im_self is not None: + # Turn a bound method into a BoundMethodWeakref instance. + # Keep track of these instances for lookup by disconnect(). + assert hasattr(target, 'im_func') or hasattr(target, '__func__'), ( + "safe_ref target %r has im_self, but no im_func, " + "don't know how to create reference" % target) + reference = BoundMethodWeakref(target=target, on_delete=on_delete) + return reference + + +class BoundMethodWeakref(object): + """'Safe' and reusable weak references to instance methods. + + BoundMethodWeakref objects provide a mechanism for referencing a + bound method without requiring that the method object itself + (which is normally a transient object) is kept alive. Instead, + the BoundMethodWeakref object keeps weak references to both the + object and the function which together define the instance method. + + Attributes: + + - ``key``: The identity key for the reference, calculated by the + class's calculate_key method applied to the target instance method. + + - ``deletion_methods``: Sequence of callable objects taking single + argument, a reference to this object which will be called when + *either* the target object or target function is garbage + collected (i.e. when this object becomes invalid). These are + specified as the on_delete parameters of safe_ref calls. + + - ``weak_self``: Weak reference to the target object. + + - ``weak_func``: Weak reference to the target function. + + Class Attributes: + + - ``_all_instances``: Class attribute pointing to all live + BoundMethodWeakref objects indexed by the class's + calculate_key(target) method applied to the target objects. + This weak value dictionary is used to short-circuit creation so + that multiple references to the same (object, function) pair + produce the same BoundMethodWeakref instance. + """ + + _all_instances = weakref.WeakValueDictionary() + + def __new__(cls, target, on_delete=None, *arguments, **named): + """Create new instance or return current instance. + + Basically this method of construction allows us to + short-circuit creation of references to already- referenced + instance methods. The key corresponding to the target is + calculated, and if there is already an existing reference, + that is returned, with its deletion_methods attribute updated. + Otherwise the new instance is created and registered in the + table of already-referenced methods. + """ + key = cls.calculate_key(target) + current = cls._all_instances.get(key) + if current is not None: + current.deletion_methods.append(on_delete) + return current + else: + base = super(BoundMethodWeakref, cls).__new__(cls) + cls._all_instances[key] = base + base.__init__(target, on_delete, *arguments, **named) + return base + + def __init__(self, target, on_delete=None): + """Return a weak-reference-like instance for a bound method. 
+ + - ``target``: The instance-method target for the weak reference, + must have im_self and im_func attributes and be + reconstructable via the following, which is true of built-in + instance methods:: + + target.im_func.__get__( target.im_self ) + + - ``on_delete``: Optional callback which will be called when + this weak reference ceases to be valid (i.e. either the + object or the function is garbage collected). Should take a + single argument, which will be passed a pointer to this + object. + """ + def remove(weak, self=self): + """Set self.isDead to True when method or instance is destroyed.""" + methods = self.deletion_methods[:] + del self.deletion_methods[:] + try: + del self.__class__._all_instances[self.key] + except KeyError: + pass + for function in methods: + try: + if callable(function): + function(self) + except Exception: + try: + traceback.print_exc() + except AttributeError: + e = sys.exc_info()[1] + print ('Exception during saferef %s ' + 'cleanup function %s: %s' % (self, function, e)) + self.deletion_methods = [on_delete] + self.key = self.calculate_key(target) + im_self = get_self(target) + im_func = get_func(target) + self.weak_self = weakref.ref(im_self, remove) + self.weak_func = weakref.ref(im_func, remove) + self.self_name = str(im_self) + self.func_name = str(im_func.__name__) + + def calculate_key(cls, target): + """Calculate the reference key for this reference. + + Currently this is a two-tuple of the id()'s of the target + object and the target function respectively. + """ + return (id(get_self(target)), id(get_func(target))) + calculate_key = classmethod(calculate_key) + + def __str__(self): + """Give a friendly representation of the object.""" + return "%s(%s.%s)" % ( + self.__class__.__name__, + self.self_name, + self.func_name, + ) + + __repr__ = __str__ + + def __nonzero__(self): + """Whether we are still a valid reference.""" + return self() is not None + + def __cmp__(self, other): + """Compare with another reference.""" + if not isinstance(other, self.__class__): + return cmp(self.__class__, type(other)) + return cmp(self.key, other.key) + + def __call__(self): + """Return a strong reference to the bound method. + + If the target cannot be retrieved, then will return None, + otherwise returns a bound instance method for our object and + function. + + Note: You may call this method any number of times, as it does + not invalidate the reference. 
+ """ + target = self.weak_self() + if target is not None: + function = self.weak_func() + if function is not None: + return function.__get__(target) + return None diff --git a/py3/lib/python3.6/site-packages/blinker/_utilities.py b/py3/lib/python3.6/site-packages/blinker/_utilities.py new file mode 100644 index 0000000..056270d --- /dev/null +++ b/py3/lib/python3.6/site-packages/blinker/_utilities.py @@ -0,0 +1,163 @@ +from weakref import ref + +from blinker._saferef import BoundMethodWeakref + + +try: + callable +except NameError: + def callable(object): + return hasattr(object, '__call__') + + +try: + from collections import defaultdict +except: + class defaultdict(dict): + + def __init__(self, default_factory=None, *a, **kw): + if (default_factory is not None and + not hasattr(default_factory, '__call__')): + raise TypeError('first argument must be callable') + dict.__init__(self, *a, **kw) + self.default_factory = default_factory + + def __getitem__(self, key): + try: + return dict.__getitem__(self, key) + except KeyError: + return self.__missing__(key) + + def __missing__(self, key): + if self.default_factory is None: + raise KeyError(key) + self[key] = value = self.default_factory() + return value + + def __reduce__(self): + if self.default_factory is None: + args = tuple() + else: + args = self.default_factory, + return type(self), args, None, None, self.items() + + def copy(self): + return self.__copy__() + + def __copy__(self): + return type(self)(self.default_factory, self) + + def __deepcopy__(self, memo): + import copy + return type(self)(self.default_factory, + copy.deepcopy(self.items())) + + def __repr__(self): + return 'defaultdict(%s, %s)' % (self.default_factory, + dict.__repr__(self)) + + +try: + from contextlib import contextmanager +except ImportError: + def contextmanager(fn): + def oops(*args, **kw): + raise RuntimeError("Python 2.5 or above is required to use " + "context managers.") + oops.__name__ = fn.__name__ + return oops + +class _symbol(object): + + def __init__(self, name): + """Construct a new named symbol.""" + self.__name__ = self.name = name + + def __reduce__(self): + return symbol, (self.name,) + + def __repr__(self): + return self.name +_symbol.__name__ = 'symbol' + + +class symbol(object): + """A constant symbol. + + >>> symbol('foo') is symbol('foo') + True + >>> symbol('foo') + foo + + A slight refinement of the MAGICCOOKIE=object() pattern. The primary + advantage of symbol() is its repr(). They are also singletons. + + Repeated calls of symbol('name') will all return the same instance. 
+ + """ + symbols = {} + + def __new__(cls, name): + try: + return cls.symbols[name] + except KeyError: + return cls.symbols.setdefault(name, _symbol(name)) + + +try: + text = (str, unicode) +except NameError: + text = str + + +def hashable_identity(obj): + if hasattr(obj, '__func__'): + return (id(obj.__func__), id(obj.__self__)) + elif hasattr(obj, 'im_func'): + return (id(obj.im_func), id(obj.im_self)) + elif isinstance(obj, text): + return obj + else: + return id(obj) + + +WeakTypes = (ref, BoundMethodWeakref) + + +class annotatable_weakref(ref): + """A weakref.ref that supports custom instance attributes.""" + + +def reference(object, callback=None, **annotations): + """Return an annotated weak ref.""" + if callable(object): + weak = callable_reference(object, callback) + else: + weak = annotatable_weakref(object, callback) + for key, value in annotations.items(): + setattr(weak, key, value) + return weak + + +def callable_reference(object, callback=None): + """Return an annotated weak ref, supporting bound instance methods.""" + if hasattr(object, 'im_self') and object.im_self is not None: + return BoundMethodWeakref(target=object, on_delete=callback) + elif hasattr(object, '__self__') and object.__self__ is not None: + return BoundMethodWeakref(target=object, on_delete=callback) + return annotatable_weakref(object, callback) + + +class lazy_property(object): + """A @property that is only evaluated once.""" + + def __init__(self, deferred): + self._deferred = deferred + self.__doc__ = deferred.__doc__ + + def __get__(self, obj, cls): + if obj is None: + return self + value = self._deferred(obj) + setattr(obj, self._deferred.__name__, value) + return value diff --git a/py3/lib/python3.6/site-packages/blinker/base.py b/py3/lib/python3.6/site-packages/blinker/base.py new file mode 100644 index 0000000..cc5880e --- /dev/null +++ b/py3/lib/python3.6/site-packages/blinker/base.py @@ -0,0 +1,455 @@ +# -*- coding: utf-8; fill-column: 76 -*- +"""Signals and events. + +A small implementation of signals, inspired by a snippet of Django signal +API client code seen in a blog post. Signals are first-class objects and +each manages its own receivers and message emission. + +The :func:`signal` function provides singleton behavior for named signals. + +""" +from warnings import warn +from weakref import WeakValueDictionary + +from blinker._utilities import ( + WeakTypes, + contextmanager, + defaultdict, + hashable_identity, + lazy_property, + reference, + symbol, + ) + + +ANY = symbol('ANY') +ANY.__doc__ = 'Token for "any sender".' +ANY_ID = 0 + + +class Signal(object): + """A notification emitter.""" + + #: An :obj:`ANY` convenience synonym, allows ``Signal.ANY`` + #: without an additional import. + ANY = ANY + + @lazy_property + def receiver_connected(self): + """Emitted after each :meth:`connect`. + + The signal sender is the signal instance, and the :meth:`connect` + arguments are passed through: *receiver*, *sender*, and *weak*. + + .. versionadded:: 1.2 + + """ + return Signal(doc="Emitted after a receiver connects.") + + @lazy_property + def receiver_disconnected(self): + """Emitted after :meth:`disconnect`. + + The sender is the signal instance, and the :meth:`disconnect` arguments + are passed through: *receiver* and *sender*. + + Note, this signal is emitted **only** when :meth:`disconnect` is + called explicitly. 
+ + The disconnect signal can not be emitted by an automatic disconnect + (due to a weakly referenced receiver or sender going out of scope), + as the receiver and/or sender instances are no longer available for + use at the time this signal would be emitted. + + An alternative approach is available by subscribing to + :attr:`receiver_connected` and setting up a custom weakref cleanup + callback on weak receivers and senders. + + .. versionadded:: 1.2 + + """ + return Signal(doc="Emitted after a receiver disconnects.") + + def __init__(self, doc=None): + """ + :param doc: optional. If provided, will be assigned to the signal's + __doc__ attribute. + + """ + if doc: + self.__doc__ = doc + #: A mapping of connected receivers. + #: + #: The values of this mapping are not meaningful outside of the + #: internal :class:`Signal` implementation, however the boolean value + #: of the mapping is useful as an extremely efficient check to see if + #: any receivers are connected to the signal. + self.receivers = {} + self._by_receiver = defaultdict(set) + self._by_sender = defaultdict(set) + self._weak_senders = {} + + def connect(self, receiver, sender=ANY, weak=True): + """Connect *receiver* to signal events sent by *sender*. + + :param receiver: A callable. Will be invoked by :meth:`send` with + `sender=` as a single positional argument and any \*\*kwargs that + were provided to a call to :meth:`send`. + + :param sender: Any object or :obj:`ANY`, defaults to ``ANY``. + Restricts notifications delivered to *receiver* to only those + :meth:`send` emissions sent by *sender*. If ``ANY``, the receiver + will always be notified. A *receiver* may be connected to + multiple *sender* values on the same Signal through multiple calls + to :meth:`connect`. + + :param weak: If true, the Signal will hold a weakref to *receiver* + and automatically disconnect when *receiver* goes out of scope or + is garbage collected. Defaults to True. + + """ + receiver_id = hashable_identity(receiver) + if weak: + receiver_ref = reference(receiver, self._cleanup_receiver) + receiver_ref.receiver_id = receiver_id + else: + receiver_ref = receiver + if sender is ANY: + sender_id = ANY_ID + else: + sender_id = hashable_identity(sender) + + self.receivers.setdefault(receiver_id, receiver_ref) + self._by_sender[sender_id].add(receiver_id) + self._by_receiver[receiver_id].add(sender_id) + del receiver_ref + + if sender is not ANY and sender_id not in self._weak_senders: + # wire together a cleanup for weakref-able senders + try: + sender_ref = reference(sender, self._cleanup_sender) + sender_ref.sender_id = sender_id + except TypeError: + pass + else: + self._weak_senders.setdefault(sender_id, sender_ref) + del sender_ref + + # broadcast this connection. if receivers raise, disconnect. + if ('receiver_connected' in self.__dict__ and + self.receiver_connected.receivers): + try: + self.receiver_connected.send(self, + receiver=receiver, + sender=sender, + weak=weak) + except: + self.disconnect(receiver, sender) + raise + if receiver_connected.receivers and self is not receiver_connected: + try: + receiver_connected.send(self, + receiver_arg=receiver, + sender_arg=sender, + weak_arg=weak) + except: + self.disconnect(receiver, sender) + raise + return receiver + + def connect_via(self, sender, weak=False): + """Connect the decorated function as a receiver for *sender*. + + :param sender: Any object or :obj:`ANY`. The decorated function + will only receive :meth:`send` emissions sent by *sender*. 
If + ``ANY``, the receiver will always be notified. A function may be + decorated multiple times with differing *sender* values. + + :param weak: If true, the Signal will hold a weakref to the + decorated function and automatically disconnect when *receiver* + goes out of scope or is garbage collected. Unlike + :meth:`connect`, this defaults to False. + + The decorated function will be invoked by :meth:`send` with + `sender=` as a single positional argument and any \*\*kwargs that + were provided to the call to :meth:`send`. + + + .. versionadded:: 1.1 + + """ + def decorator(fn): + self.connect(fn, sender, weak) + return fn + return decorator + + @contextmanager + def connected_to(self, receiver, sender=ANY): + """Execute a block with the signal temporarily connected to *receiver*. + + :param receiver: a receiver callable + :param sender: optional, a sender to filter on + + This is a context manager for use in the ``with`` statement. It can + be useful in unit tests. *receiver* is connected to the signal for + the duration of the ``with`` block, and will be disconnected + automatically when exiting the block: + + .. testsetup:: + + from __future__ import with_statement + from blinker import Signal + on_ready = Signal() + receiver = lambda sender: None + + .. testcode:: + + with on_ready.connected_to(receiver): + # do stuff + on_ready.send(123) + + .. versionadded:: 1.1 + + """ + self.connect(receiver, sender=sender, weak=False) + try: + yield None + except: + self.disconnect(receiver) + raise + else: + self.disconnect(receiver) + + def temporarily_connected_to(self, receiver, sender=ANY): + """An alias for :meth:`connected_to`. + + :param receiver: a receiver callable + :param sender: optional, a sender to filter on + + .. versionadded:: 0.9 + + .. versionchanged:: 1.1 + Renamed to :meth:`connected_to`. ``temporarily_connected_to`` was + deprecated in 1.2 and will be removed in a subsequent version. + + """ + warn("temporarily_connected_to is deprecated; " + "use connected_to instead.", + DeprecationWarning) + return self.connected_to(receiver, sender) + + def send(self, *sender, **kwargs): + """Emit this signal on behalf of *sender*, passing on \*\*kwargs. + + Returns a list of 2-tuples, pairing receivers with their return + value. The ordering of receiver notification is undefined. + + :param \*sender: Any object or ``None``. If omitted, synonymous + with ``None``. Only accepts one positional argument. + + :param \*\*kwargs: Data to be sent to receivers. + + """ + # Using '*sender' rather than 'sender=None' allows 'sender' to be + # used as a keyword argument- i.e. it's an invisible name in the + # function signature. + if len(sender) == 0: + sender = None + elif len(sender) > 1: + raise TypeError('send() accepts only one positional argument, ' + '%s given' % len(sender)) + else: + sender = sender[0] + if not self.receivers: + return [] + else: + return [(receiver, receiver(sender, **kwargs)) + for receiver in self.receivers_for(sender)] + + def has_receivers_for(self, sender): + """True if there is probably a receiver for *sender*. + + Performs an optimistic check only. Does not guarantee that all + weakly referenced receivers are still alive. See + :meth:`receivers_for` for a stronger search. 
+ + """ + if not self.receivers: + return False + if self._by_sender[ANY_ID]: + return True + if sender is ANY: + return False + return hashable_identity(sender) in self._by_sender + + def receivers_for(self, sender): + """Iterate all live receivers listening for *sender*.""" + # TODO: test receivers_for(ANY) + if self.receivers: + sender_id = hashable_identity(sender) + if sender_id in self._by_sender: + ids = (self._by_sender[ANY_ID] | + self._by_sender[sender_id]) + else: + ids = self._by_sender[ANY_ID].copy() + for receiver_id in ids: + receiver = self.receivers.get(receiver_id) + if receiver is None: + continue + if isinstance(receiver, WeakTypes): + strong = receiver() + if strong is None: + self._disconnect(receiver_id, ANY_ID) + continue + receiver = strong + yield receiver + + def disconnect(self, receiver, sender=ANY): + """Disconnect *receiver* from this signal's events. + + :param receiver: a previously :meth:`connected` callable + + :param sender: a specific sender to disconnect from, or :obj:`ANY` + to disconnect from all senders. Defaults to ``ANY``. + + """ + if sender is ANY: + sender_id = ANY_ID + else: + sender_id = hashable_identity(sender) + receiver_id = hashable_identity(receiver) + self._disconnect(receiver_id, sender_id) + + if ('receiver_disconnected' in self.__dict__ and + self.receiver_disconnected.receivers): + self.receiver_disconnected.send(self, + receiver=receiver, + sender=sender) + + def _disconnect(self, receiver_id, sender_id): + if sender_id == ANY_ID: + if self._by_receiver.pop(receiver_id, False): + for bucket in self._by_sender.values(): + bucket.discard(receiver_id) + self.receivers.pop(receiver_id, None) + else: + self._by_sender[sender_id].discard(receiver_id) + self._by_receiver[receiver_id].discard(sender_id) + + def _cleanup_receiver(self, receiver_ref): + """Disconnect a receiver from all senders.""" + self._disconnect(receiver_ref.receiver_id, ANY_ID) + + def _cleanup_sender(self, sender_ref): + """Disconnect all receivers from a sender.""" + sender_id = sender_ref.sender_id + assert sender_id != ANY_ID + self._weak_senders.pop(sender_id, None) + for receiver_id in self._by_sender.pop(sender_id, ()): + self._by_receiver[receiver_id].discard(sender_id) + + def _cleanup_bookkeeping(self): + """Prune unused sender/receiver bookeeping. Not threadsafe. + + Connecting & disconnecting leave behind a small amount of bookeeping + for the receiver and sender values. Typical workloads using Blinker, + for example in most web apps, Flask, CLI scripts, etc., are not + adversely affected by this bookkeeping. + + With a long-running Python process performing dynamic signal routing + with high volume- e.g. connecting to function closures, "senders" are + all unique object instances, and doing all of this over and over- you + may see memory usage will grow due to extraneous bookeeping. (An empty + set() for each stale sender/receiver pair.) + + This method will prune that bookeeping away, with the caveat that such + pruning is not threadsafe. The risk is that cleanup of a fully + disconnected receiver/sender pair occurs while another thread is + connecting that same pair. If you are in the highly dynamic, unique + receiver/sender situation that has lead you to this method, that + failure mode is perhaps not a big deal for you. + """ + for mapping in (self._by_sender, self._by_receiver): + for _id, bucket in list(mapping.items()): + if not bucket: + mapping.pop(_id, None) + + def _clear_state(self): + """Throw away all signal state. 
Useful for unit tests.""" + self._weak_senders.clear() + self.receivers.clear() + self._by_sender.clear() + self._by_receiver.clear() + + +receiver_connected = Signal("""\ +Sent by a :class:`Signal` after a receiver connects. + +:argument: the Signal that was connected to +:keyword receiver_arg: the connected receiver +:keyword sender_arg: the sender to connect to +:keyword weak_arg: true if the connection to receiver_arg is a weak reference + +.. deprecated:: 1.2 + +As of 1.2, individual signals have their own private +:attr:`~Signal.receiver_connected` and +:attr:`~Signal.receiver_disconnected` signals with a slightly simplified +call signature. This global signal is planned to be removed in 1.6. + +""") + + +class NamedSignal(Signal): + """A named generic notification emitter.""" + + def __init__(self, name, doc=None): + Signal.__init__(self, doc) + + #: The name of this signal. + self.name = name + + def __repr__(self): + base = Signal.__repr__(self) + return "%s; %r>" % (base[:-1], self.name) + + +class Namespace(dict): + """A mapping of signal names to signals.""" + + def signal(self, name, doc=None): + """Return the :class:`NamedSignal` *name*, creating it if required. + + Repeated calls to this function will return the same signal object. + + """ + try: + return self[name] + except KeyError: + return self.setdefault(name, NamedSignal(name, doc)) + + +class WeakNamespace(WeakValueDictionary): + """A weak mapping of signal names to signals. + + Automatically cleans up unused Signals when the last reference goes out + of scope. This namespace implementation exists for a measure of legacy + compatibility with Blinker <= 1.2, and may be dropped in the future. + + .. versionadded:: 1.3 + + """ + + def signal(self, name, doc=None): + """Return the :class:`NamedSignal` *name*, creating it if required. + + Repeated calls to this function will return the same signal object. + + """ + try: + return self[name] + except KeyError: + return self.setdefault(name, NamedSignal(name, doc)) + + +signal = Namespace().signal diff --git a/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/DESCRIPTION.rst b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..0b0953d --- /dev/null +++ b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/DESCRIPTION.rst @@ -0,0 +1,50 @@ +Certifi: Python SSL Certificates +================================ + +`Certifi`_ is a carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python2.7/site-packages/certifi/cacert.pem + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. 
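(A minimal sketch of how the bundle path above is typically consumed; the use
of the standard-library ``ssl`` module is an illustration, not part of
certifi itself::

    import ssl
    import certifi

    # Build a client-side SSL context that trusts certifi's CA bundle.
    context = ssl.create_default_context(cafile=certifi.where())

certifi only supplies the path; the TLS client performs the actual
verification.)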
Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +In previous versions, ``certifi`` provided the ``certifi.old_where()`` function +to intentionally re-add the 1024-bit roots back into your bundle. This was not +recommended in production and therefore was removed at the end of 2018. + +.. _`Certifi`: https://certifi.io/en/latest/ +.. _`Requests`: http://docs.python-requests.org/en/latest/ + + diff --git a/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/LICENSE.txt b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/LICENSE.txt new file mode 100644 index 0000000..802b53f --- /dev/null +++ b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/LICENSE.txt @@ -0,0 +1,21 @@ +This packge contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1# +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/METADATA b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/METADATA new file mode 100644 index 0000000..29540e3 --- /dev/null +++ b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/METADATA @@ -0,0 +1,74 @@ +Metadata-Version: 2.0 +Name: certifi +Version: 2019.6.16 +Summary: Python package for providing Mozilla's CA Bundle. 
+Home-page: https://certifi.io/ +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 + +Certifi: Python SSL Certificates +================================ + +`Certifi`_ is a carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python2.7/site-packages/certifi/cacert.pem + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +In previous versions, ``certifi`` provided the ``certifi.old_where()`` function +to intentionally re-add the 1024-bit roots back into your bundle. This was not +recommended in production and therefore was removed at the end of 2018. + +.. _`Certifi`: https://certifi.io/en/latest/ +.. 
_`Requests`: http://docs.python-requests.org/en/latest/ + + diff --git a/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/RECORD b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/RECORD new file mode 100644 index 0000000..4ad5fe1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/RECORD @@ -0,0 +1,15 @@ +certifi/__init__.py,sha256=phsMyKTQP7MMe1wAHfhXPbQVxL3wXixOomxzNh5Cwa4,52 +certifi/__main__.py,sha256=FiOYt1Fltst7wk9DRa6GCoBr8qBUxlNQu_MKJf04E6s,41 +certifi/cacert.pem,sha256=DddOv7pQyMB8zNNgiXSSFrPVn7EN8qbe7P6h_IYyuek,282085 +certifi/core.py,sha256=EuFc2BsToG5O1-qsx4BSjQ1r1-7WRtH87b1WflZOWhI,218 +certifi-2019.6.16.dist-info/DESCRIPTION.rst,sha256=aLNHONztn2ZiBpSTivVFy6EDIWmuNYSsEQwx4NWbvB4,1580 +certifi-2019.6.16.dist-info/LICENSE.txt,sha256=anCkv2sBABbVmmS4rkrY3H9e8W8ftFPMLs13HFo0ETE,1048 +certifi-2019.6.16.dist-info/METADATA,sha256=bmxei5fIjQJCT_5_2k2ReQ1IDvrguA5Qan26BXRcbN0,2522 +certifi-2019.6.16.dist-info/RECORD,, +certifi-2019.6.16.dist-info/WHEEL,sha256=5wvfB7GvgZAbKBSE9uX9Zbi6LCL-_KgezgHblXhCRnM,113 +certifi-2019.6.16.dist-info/metadata.json,sha256=soH2Ke2dIXqmSFFz1swsK3Uno_9ed57OqPIXuOxFXYA,1048 +certifi-2019.6.16.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi-2019.6.16.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi/__pycache__/__main__.cpython-36.pyc,, +certifi/__pycache__/core.cpython-36.pyc,, +certifi/__pycache__/__init__.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/WHEEL b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/WHEEL new file mode 100644 index 0000000..7bf9daa --- /dev/null +++ b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0.a0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/metadata.json b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/metadata.json new file mode 100644 index 0000000..afe3503 --- /dev/null +++ b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)", "Natural Language :: English", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7"], "extensions": {"python.details": {"contacts": [{"email": "me@kennethreitz.com", "name": "Kenneth Reitz", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://certifi.io/"}}}, "generator": "bdist_wheel (0.30.0.a0)", "license": "MPL-2.0", "metadata_version": "2.0", "name": "certifi", "summary": "Python package for providing Mozilla's CA Bundle.", "version": "2019.6.16"} \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ 
b/py3/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/py3/lib/python3.6/site-packages/certifi/__init__.py b/py3/lib/python3.6/site-packages/certifi/__init__.py new file mode 100644 index 0000000..8ccb14e --- /dev/null +++ b/py3/lib/python3.6/site-packages/certifi/__init__.py @@ -0,0 +1,3 @@ +from .core import where + +__version__ = "2019.06.16" diff --git a/py3/lib/python3.6/site-packages/certifi/__main__.py b/py3/lib/python3.6/site-packages/certifi/__main__.py new file mode 100644 index 0000000..5f1da0d --- /dev/null +++ b/py3/lib/python3.6/site-packages/certifi/__main__.py @@ -0,0 +1,2 @@ +from certifi import where +print(where()) diff --git a/py3/lib/python3.6/site-packages/certifi/cacert.pem b/py3/lib/python3.6/site-packages/certifi/cacert.pem new file mode 100644 index 0000000..9ca290f --- /dev/null +++ b/py3/lib/python3.6/site-packages/certifi/cacert.pem @@ -0,0 +1,4618 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI 
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Label: "Verisign Class 3 Public Primary Certification Authority - G3" +# Serial: 206684696279472310254277870180966723415 +# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 +# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 +# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b +N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t +KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu +kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm +CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ +Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu +imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te +2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe +DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p +F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt +TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Label: "AddTrust External Root" +# Serial: 1 +# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f +# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 +# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 +-----BEGIN CERTIFICATE----- +MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs +IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 +MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux +FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h +bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v +dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt +H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 +uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX +mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX +a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN +E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 +WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD +VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 +Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU +cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx +IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN +AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH +YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 +6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC +Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX +c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a +mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
+# Label: "GeoTrust Global CA" +# Serial: 144470 +# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 +# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 +# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a +-----BEGIN CERTIFICATE----- +MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i +YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg +R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 +9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq +fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv +iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU +1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ +bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW +MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA +ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l +uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn +Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS +tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF +PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un +hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV +5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Label: "GeoTrust Universal CA" +# Serial: 1 +# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 +# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 +# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 +-----BEGIN CERTIFICATE----- +MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy +c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE +BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 +IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV +VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 +cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT +QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh +F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v +c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w +mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd +VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX +teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ +f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe +Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ +nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY +MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG +9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc +aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX +IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn +ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z +uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja +QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW +koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 +ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt +DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm +bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Label: "GeoTrust Universal CA 2" +# Serial: 1 +# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 +# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 +# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy +c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD +VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 +c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 +WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG +FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq +XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL +se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb +KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd +IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 +y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt +hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc +QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 +Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV +HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ +KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z +dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ +L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr +Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo +ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY +T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz +GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m +1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV +OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH +6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX +QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL 
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Label: "QuoVadis Root CA" +# Serial: 985026699 +# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 +# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 +# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 +-----BEGIN CERTIFICATE----- +MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz +MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw +IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR +dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp +li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D +rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ +WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug +F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU +xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC +Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv +dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw +ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl +IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh +c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy +ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh +Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI +KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T +KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq +y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p +dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD +VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL +MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk 
+fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 +7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R +cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y +mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW +xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK +SnQ2+Q== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV 
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t +dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG 
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=Sonera Class2 CA O=Sonera +# Subject: CN=Sonera Class2 CA O=Sonera +# Label: "Sonera Class 2 Root CA" +# Serial: 29 +# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb +# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 +# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 +-----BEGIN CERTIFICATE----- +MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP +MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx +MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV +BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o +Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt +5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s +3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej +vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu +8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw +DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG +MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil +zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ +3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD +FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 +Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 +ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs 
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: O=Government Root Certification Authority +# Subject: O=Government Root Certification Authority +# Label: "Taiwan GRCA" +# Serial: 42023070807708724159991140556527066870 +# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e +# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 +# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ +MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow +PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR +IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q +gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy +yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts +F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 +jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx +ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC +VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK +YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH +EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN +Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud +DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE +MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK 
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ +TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf +qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK +ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE +JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 +hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 +EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm +nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX +udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz +ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe +LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl +pYYsfPQS +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=Class 2 Primary CA O=Certplus +# Subject: CN=Class 2 Primary CA O=Certplus +# Label: "Certplus Class 2 Primary CA" +# Serial: 177770208045934040241468760488327595043 +# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b +# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb +# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb +-----BEGIN CERTIFICATE----- +MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw +PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz 
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 +MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz +IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ +ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR +VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL +kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd +EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas +H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 +HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud +DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 +QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu +Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ +AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 +yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR +FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA +ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB +kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 +l7+ijrRU +-----END CERTIFICATE----- + +# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Label: "DST Root CA X3" +# Serial: 91299735575339953335919266965803778155 +# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 +# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 +# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ +MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT +DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow +PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD +Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O +rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq +OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b +xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw +7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD +aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG +SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 +ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr +AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz +R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 +JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo +Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF 
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- +MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c 
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Label: "GeoTrust Primary Certification Authority" +# Serial: 32798226551256963324313806436981982369 +# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf +# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 +# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY +MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo +R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx +MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK +Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 +AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA +ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 +7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W +kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI +mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ +KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 +6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl +4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K +oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj +UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU +AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only +# Label: "thawte Primary Root CA" +# Serial: 69529181992039203566298953787712940909 +# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 +# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 +# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB +qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV +BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw +NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j +LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG +A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs +W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta +3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk +6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 +Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J +NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP +r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU +DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz +YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 +/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ +LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 +jVaMaA== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" +# Serial: 33037644167568058970164719475676101450 +# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c +# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 +# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW +ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 +nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex +t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz +SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG +BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ +rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ +NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E +BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH +BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv +MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE +p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y +5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK +WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ +4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N +hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg 
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT 
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GA CA" +# Serial: 86718877871133159090080555911823548314 +# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 +# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 +# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 +-----BEGIN CERTIFICATE----- +MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB +ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly +aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl +ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w +NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G +A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD +VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX +SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR +VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 +w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF +mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg +4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 +4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw +EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx +SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 +ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 +vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa +hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi +Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ +/L7fCg0= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center +# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center +# Label: "Deutsche Telekom Root CA 2" +# Serial: 38 +# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 +# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf +# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 +-----BEGIN CERTIFICATE----- +MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc +MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj +IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB +IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE +RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl +U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 +IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU +ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC +QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr +rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S +NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc +QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH +txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP +BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC +AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp +tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa +IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl +6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ +xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU +Cm26OWMohpLzGITY+9HPBVZkVw== +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE 
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z 
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G3" +# Serial: 28809105769928564313984085209975885599 +# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 +# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd +# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 +-----BEGIN CERTIFICATE----- +MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB +mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT +MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s +eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv +cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ +BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg +MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 +BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz ++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm +hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn +5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W +JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL +DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC +huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw +HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB +AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB +zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN +kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD +AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH +SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G +spki4cErx5z481+oghLrGREt +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G2" +# Serial: 71758320672825410020661621085256472406 +# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f +# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 +# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 +-----BEGIN CERTIFICATE----- +MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp +IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi +BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw +MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh +d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig +YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v +dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ +BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 +papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K +DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 +KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox +XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only +# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G3" +# Serial: 127614157056681299805556476275995414779 +# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 +# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 +# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB +rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV +BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa +Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl +LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u +MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl +ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm +gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 +YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf +b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 +9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S +zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk +OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV +HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA +2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW +oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu +t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c +KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM +m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu +MdRAGmI0Nj81Aa6sY6A= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G2" +# Serial: 80682863203381065782177908751794619243 +# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a +# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 +# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 +-----BEGIN CERTIFICATE----- +MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL +MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj +KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 +MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV +BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw +NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV +BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH +MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL +So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal +tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG +CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT +qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz +rD6ogRLQy7rQkgu2npaqBA+K +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Universal Root Certification Authority" +# Serial: 85209574734084581917763752644031726877 +# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 +# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 +# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c +-----BEGIN CERTIFICATE----- +MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB +vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W +ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX +MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 +IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y +IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh +bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF +9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH +H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H +LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN +/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT +rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw +WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs +exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud +DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 +sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ 
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz +4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ +BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR +lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 +7M2CYfE45k+XmCpajQ== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" +# Serial: 63143484348153506665311985501458640051 +# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 +# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a +# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 +-----BEGIN CERTIFICATE----- +MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp +U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg +SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln +biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm +GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve +fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ +aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj +aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW +kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC +4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga +FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. 
OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G2" +# Serial: 10000012 +# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a +# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 +# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f +-----BEGIN CERTIFICATE----- +MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX +DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 +qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp +uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU +Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE +pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp +5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M +UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN +GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy +5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv +6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK +eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 +B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ 
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov +L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG +SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS +CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen +5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 +IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK +gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL ++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL +vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm +bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk +N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC +Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z +ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
+# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp 
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. +# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Chambers of Commerce Root - 2008 O=AC 
Camerfirma S.A. +# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. +# Label: "Chambers of Commerce Root - 2008" +# Serial: 11806822484801597146 +# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 +# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c +# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 +-----BEGIN CERTIFICATE----- +MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz +IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz +MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj +dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw +EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp +MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 +28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq +VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q +DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR +5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL +ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a +Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl +UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s ++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 +Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj +ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx +hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV +HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 ++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN +YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t +L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy +ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt +IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV +HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w +DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW +PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF +5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 +glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH +FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 +pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD +xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG +tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq +jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De +fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg +OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ +d0jQ +-----END CERTIFICATE----- + +# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. +# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
+# Label: "Global Chambersign Root - 2008" +# Serial: 14541511773111788494 +# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 +# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c +# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca +-----BEGIN CERTIFICATE----- +MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD +aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx +MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy +cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG +A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl +BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI +hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed +KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 +G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 +zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 +ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG +HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 +Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V +yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e +beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r +6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh +wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog +zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW +BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr +ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp +ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk +cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt +YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC +CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow +KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI +hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ +UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz +X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x +fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz +a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd +Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd +SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O +AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso +M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge +v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z +09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2011" +# Serial: 0 +# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 +# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d +# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix +RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p +YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw +NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK +EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl +cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz +dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ +fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns +bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD +75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP +FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV +HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp +5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu +b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA +A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p +6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 +dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys +Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI +l7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 +jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: O=Trustis Limited OU=Trustis FPS Root CA +# Subject: O=Trustis Limited OU=Trustis FPS Root CA +# Label: "Trustis FPS Root CA" +# Serial: 36053640375399034304724988975563710553 +# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d +# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 +# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d +-----BEGIN CERTIFICATE----- +MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL +ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx +MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc +MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ +AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH +iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj +vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA +0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB +OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ +BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E +FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 +GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW +zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 +1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE +f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F +jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN +ZetX2fNXlrtIzYE= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr 
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv 
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ +1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus +# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus +# Label: "EE Certification Centre Root CA" +# Serial: 112324828676200291871926431888494945866 +# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f +# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 +# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 +-----BEGIN CERTIFICATE----- +MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 +MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 +czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG +CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy +MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl +ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS +b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy +euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO +bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw 
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d +MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE +1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ +zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB +BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF +BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV +v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG +E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u +uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW +iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v +GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV 
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw +ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os 
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 
Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT +VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo +q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR +HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK 
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV 
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL 
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD 
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq 
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ 
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT 
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE 
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 14367148294922964480859022125800977897474 +# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e +# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb +# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ +FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F +uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX 
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs +ewv4n4Q= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G3" +# Serial: 10003001 +# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 +# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc +# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX +DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP +cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW +IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX +xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy +KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR +9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az +5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 +6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 +Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP +bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt +BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt +XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd +INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD +U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp +LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 +Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp +gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh +/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw 
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A +fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq +4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR +1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ +QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM +94B7IWcnMFk= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC +UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 +FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw 
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj 
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. +# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. 
+# Label: "LuxTrust Global Root 2" +# Serial: 59914338225734147123941058376788110305822489521 +# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c +# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f +# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 +-----BEGIN CERTIFICATE----- +MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL +BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV +BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw +MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B +LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F +ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem +hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 +EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn +Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 +zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ +96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m +j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g +DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ +8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j +X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH +hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB +KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 +Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT ++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL +BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 +BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO +jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 +loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c +qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ +2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ +JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre +zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf +LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ +x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 +oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG 
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr +N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ 
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-1" +# Serial: 15752444095811006489 +# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 +# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a +# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y +IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB +pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h +IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG +A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU +cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid +RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V +seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme +9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV +EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW +hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ +DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD +ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I +/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf +ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ +yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts +L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN +zl/HHk484IkzlQsPpTLWPFp5LBk= +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-2" +# Serial: 2711694510199101698 +# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 +# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 +# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 +-----BEGIN CERTIFICATE----- +MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig +Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk +MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg +Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD +VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy +dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ +QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq +1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp +2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK +DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape +az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF +3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 +oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM +g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 +mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh +8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd +BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U +nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw +DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX +dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ +MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL +/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX +CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa +ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW +2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 +N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 +Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB +As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp +5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu +1uwJ +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor ECA-1" +# Serial: 9548242946988625984 +# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c +# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd +# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y +IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig +RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb +3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA +BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 +3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou +owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ +wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF +ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf +BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ +MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv +civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 +AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F +hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 +soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI +WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi +tJ/X5g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud +EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 
2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# 
Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 146587175971765017618439757810265552097 +# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 +# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 +# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM +f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX +mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 +zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P +fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc +vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 +Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp +zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO +Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW +k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ +DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF +lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW +Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 +d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z +XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR +gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 +d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv +J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg +DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM ++SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy +F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 +SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws +E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 146587176055767053814479386953112547951 +# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b +# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d +# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv +CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg +GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu +XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd +re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu 
+PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 +mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K +8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj +x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR +nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 +kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok +twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp +8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT +vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT +z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA +pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb +pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB +R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R +RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk +0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC +5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF +izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn +yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 146587176140553309517047991083707763997 +# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 +# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 +# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 +-----BEGIN CERTIFICATE----- +MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout +736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A +DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk +fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA +njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 146587176229350439916519468929765261721 +# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 +# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb +# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd +-----BEGIN CERTIFICATE----- +MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu +hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l +xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud 
+DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 +CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx +sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy +YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS 
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO +ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L 
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby +RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: 
"Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa +LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- diff --git a/py3/lib/python3.6/site-packages/certifi/core.py b/py3/lib/python3.6/site-packages/certifi/core.py new file mode 100644 index 0000000..7271acf --- /dev/null +++ b/py3/lib/python3.6/site-packages/certifi/core.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- + +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem. 
+""" +import os + + +def where(): + f = os.path.dirname(__file__) + + return os.path.join(f, 'cacert.pem') diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/.libs_cffi_backend/libffi-ae16d830.so.6.0.4 b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/.libs_cffi_backend/libffi-ae16d830.so.6.0.4 new file mode 100644 index 0000000..3e4e438 Binary files /dev/null and b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/.libs_cffi_backend/libffi-ae16d830.so.6.0.4 differ diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/LICENSE.txt b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/LICENSE.txt new file mode 100644 index 0000000..29225ee --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/LICENSE.txt @@ -0,0 +1,26 @@ + +Except when otherwise stated (look for LICENSE files in directories or +information at the beginning of each file) all software and +documentation is licensed as follows: + + The MIT License + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/PKG-INFO b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/PKG-INFO new file mode 100644 index 0000000..b2284b3 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/PKG-INFO @@ -0,0 +1,36 @@ +Metadata-Version: 2.1 +Name: cffi +Version: 1.12.3 +Summary: Foreign Function Interface for Python calling C code. +Home-page: http://cffi.readthedocs.org +Author: Armin Rigo, Maciej Fijalkowski +Author-email: python-cffi@googlegroups.com +License: MIT +Platform: UNKNOWN +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Dist: pycparser + + +CFFI +==== + +Foreign Function Interface for Python calling C code. +Please see the `Documentation `_. 
+ +Contact +------- + +`Mailing list `_ + + diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/RECORD b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/RECORD new file mode 100644 index 0000000..3f1bcd6 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/RECORD @@ -0,0 +1,28 @@ +_cffi_backend.cpython-36m-x86_64-linux-gnu.so,sha256=MTEsB338OuQyMi8196lpH7vjq-KEeSk7gL0CvOXwiiU,849704 +cffi/cffi_opcode.py,sha256=v9RdD_ovA8rCtqsC95Ivki5V667rAOhGgs3fb2q9xpM,5724 +cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 +cffi/vengine_gen.py,sha256=Zkq0-EdeZwn6qUvf_CI8iUEs2UxVIvDmKCH1j0-y0GI,26676 +cffi/cparser.py,sha256=dcVqrRob1zqrCO--RZ6e-TtobJ7VMDpCU85W6QJ-N-4,40874 +cffi/backend_ctypes.py,sha256=_WkpD1SJel5gJovV-0u8hw-XvD3Efapqm9pIAEHTHn4,42449 +cffi/vengine_cpy.py,sha256=hdyjjZNijLrg_uGMnnFyC-7GG_LxWtwB8BlS2vvVDQ0,41470 +cffi/verifier.py,sha256=J9Enz2rbJb9CHPqWlWQ5uQESoyr0uc7MNWugchjXBv4,11207 +cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374 +cffi/model.py,sha256=AYyjS26uiFKXtkm43qmStpy9zfGh5HVJF4UETYFBt6w,21682 +cffi/api.py,sha256=Q07iwDD0FRwWa2fx2ZzQft69iJs9aNR52fvrtUy3EY4,41800 +cffi/_cffi_include.h,sha256=JuFfmwpRE65vym3Nxr9vDMOIEuv21tXdarkL1l2WNms,12149 +cffi/commontypes.py,sha256=QS4uxCDI7JhtTyjh1hlnCA-gynmaszWxJaRRLGkJa1A,2689 +cffi/ffiplatform.py,sha256=HMXqR8ks2wtdsNxGaWpQ_PyqIvtiuos_vf1qKCy-cwg,4046 +cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976 +cffi/_embedding.h,sha256=PuNkRzXjURiRh7tXzVdIn0RD9pTJx04ZokHbcEO_3OY,17226 +cffi/setuptools_ext.py,sha256=qc6arfrSzm4RNT5oJz6d5td7KJ-pHfI7bqYD0X4Q-08,8848 +cffi/__init__.py,sha256=XPx-ySmw7OmYmr-7iXd3YoXhXj1HQLHYviMKpmAuWLc,513 +cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877 +cffi/_cffi_errors.h,sha256=6nFQ-4dRQI1bXRoSeqdvyKU33TmutQJB_2fAhWSzdl8,3856 +cffi/recompiler.py,sha256=LGqj7GPuq4KIG4axrN5G0Oy6YGmrLbBA0bHE-jCl6Oo,62711 +.libs_cffi_backend/libffi-ae16d830.so.6.0.4,sha256=DIjdi229NoMS7VUut0J5Q7Rk88JhtOndGB6xPyyszZM,149064 +cffi-1.12.3.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76 +cffi-1.12.3.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 +cffi-1.12.3.dist-info/RECORD,, +cffi-1.12.3.dist-info/WHEEL,sha256=d2ILPScH-y2UwGxsW1PeA2TT-KW0Git4AJ6LeOK8sQo,109 +cffi-1.12.3.dist-info/LICENSE.txt,sha256=BLgPWwd7vtaICM_rreteNSPyqMmpZJXFh72W3x6sKjM,1294 +cffi-1.12.3.dist-info/METADATA,sha256=OA_DlKzbYB72gWWz0R2ds_RJzTIzvFx5cnHf5NXTRuo,1140 diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/WHEEL b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/WHEEL new file mode 100644 index 0000000..92946fe --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: false +Tag: cp36-cp36m-manylinux1_x86_64 + diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/entry_points.txt b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/entry_points.txt new file mode 100644 index 0000000..eee7e0f --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/entry_points.txt @@ -0,0 +1,3 @@ +[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules + diff --git 
a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/requires.txt b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/requires.txt new file mode 100644 index 0000000..dc1c9e1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/requires.txt @@ -0,0 +1 @@ +pycparser diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/top_level.txt b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/top_level.txt new file mode 100644 index 0000000..f645779 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/EGG-INFO/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/_cffi_backend.cpython-36m-x86_64-linux-gnu.so b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/_cffi_backend.cpython-36m-x86_64-linux-gnu.so new file mode 100644 index 0000000..06d29b5 Binary files /dev/null and b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/_cffi_backend.cpython-36m-x86_64-linux-gnu.so differ diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/__init__.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/__init__.py new file mode 100644 index 0000000..0224a15 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/__init__.py @@ -0,0 +1,14 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing +from .error import PkgConfigError + +__version__ = "1.12.3" +__version_info__ = (1, 12, 3) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. +__version_verifier_modules__ = "0.8.6" diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/_cffi_errors.h b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/_cffi_errors.h new file mode 100644 index 0000000..83cdad0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/_cffi_errors.h @@ -0,0 +1,147 @@ +#ifndef CFFI_MESSAGEBOX +# ifdef _MSC_VER +# define CFFI_MESSAGEBOX 1 +# else +# define CFFI_MESSAGEBOX 0 +# endif +#endif + + +#if CFFI_MESSAGEBOX +/* Windows only: logic to take the Python-CFFI embedding logic + initialization errors and display them in a background thread + with MessageBox. The idea is that if the whole program closes + as a result of this problem, then likely it is already a console + program and you can read the stderr output in the console too. + If it is not a console program, then it will likely show its own + dialog to complain, or generally not abruptly close, and for this + case the background thread should stay alive. 
+*/ +static void *volatile _cffi_bootstrap_text; + +static PyObject *_cffi_start_error_capture(void) +{ + PyObject *result = NULL; + PyObject *x, *m, *bi; + + if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, + (void *)1, NULL) != NULL) + return (PyObject *)1; + + m = PyImport_AddModule("_cffi_error_capture"); + if (m == NULL) + goto error; + + result = PyModule_GetDict(m); + if (result == NULL) + goto error; + +#if PY_MAJOR_VERSION >= 3 + bi = PyImport_ImportModule("builtins"); +#else + bi = PyImport_ImportModule("__builtin__"); +#endif + if (bi == NULL) + goto error; + PyDict_SetItemString(result, "__builtins__", bi); + Py_DECREF(bi); + + x = PyRun_String( + "import sys\n" + "class FileLike:\n" + " def write(self, x):\n" + " try:\n" + " of.write(x)\n" + " except: pass\n" + " self.buf += x\n" + "fl = FileLike()\n" + "fl.buf = ''\n" + "of = sys.stderr\n" + "sys.stderr = fl\n" + "def done():\n" + " sys.stderr = of\n" + " return fl.buf\n", /* make sure the returned value stays alive */ + Py_file_input, + result, result); + Py_XDECREF(x); + + error: + if (PyErr_Occurred()) + { + PyErr_WriteUnraisable(Py_None); + PyErr_Clear(); + } + return result; +} + +#pragma comment(lib, "user32.lib") + +static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) +{ + Sleep(666); /* may be interrupted if the whole process is closing */ +#if PY_MAJOR_VERSION >= 3 + MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, + L"Python-CFFI error", + MB_OK | MB_ICONERROR); +#else + MessageBoxA(NULL, (char *)_cffi_bootstrap_text, + "Python-CFFI error", + MB_OK | MB_ICONERROR); +#endif + _cffi_bootstrap_text = NULL; + return 0; +} + +static void _cffi_stop_error_capture(PyObject *ecap) +{ + PyObject *s; + void *text; + + if (ecap == (PyObject *)1) + return; + + if (ecap == NULL) + goto error; + + s = PyRun_String("done()", Py_eval_input, ecap, ecap); + if (s == NULL) + goto error; + + /* Show a dialog box, but in a background thread, and + never show multiple dialog boxes at once. */ +#if PY_MAJOR_VERSION >= 3 + text = PyUnicode_AsWideCharString(s, NULL); +#else + text = PyString_AsString(s); +#endif + + _cffi_bootstrap_text = text; + + if (text != NULL) + { + HANDLE h; + h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, + NULL, 0, NULL); + if (h != NULL) + CloseHandle(h); + } + /* decref the string, but it should stay alive as 'fl.buf' + in the small module above. It will really be freed only if + we later get another similar error. So it's a leak of at + most one copy of the small module. That's fine for this + situation which is usually a "fatal error" anyway. */ + Py_DECREF(s); + PyErr_Clear(); + return; + + error: + _cffi_bootstrap_text = NULL; + PyErr_Clear(); +} + +#else + +static PyObject *_cffi_start_error_capture(void) { return NULL; } +static void _cffi_stop_error_capture(PyObject *ecap) { } + +#endif diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/_cffi_include.h b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/_cffi_include.h new file mode 100644 index 0000000..37ea74f --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/_cffi_include.h @@ -0,0 +1,308 @@ +#define _CFFI_ + +/* We try to define Py_LIMITED_API before including Python.h. + + Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and + Py_REF_DEBUG are not defined. This is a best-effort approximation: + we can learn about Py_DEBUG from pyconfig.h, but it is unclear if + the same works for the other two macros. 
Py_DEBUG implies them, + but not the other way around. + + Issue #350 is still open: on Windows, the code here causes it to link + with PYTHON36.DLL (for example) instead of PYTHON3.DLL. A fix was + attempted in 164e526a5515 and 14ce6985e1c3, but reverted: virtualenv + does not make PYTHON3.DLL available, and so the "correctly" compiled + version would not run inside a virtualenv. We will re-apply the fix + after virtualenv has been fixed for some time. For explanation, see + issue #355. For a workaround if you want PYTHON3.DLL and don't worry + about virtualenv, see issue #350. See also 'py_limited_api' in + setuptools_ext.py. +*/ +#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API) +# include +# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) +# define Py_LIMITED_API +# endif +#endif + +#include +#ifdef __cplusplus +extern "C" { +#endif +#include +#include "parse_c_type.h" + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#ifdef __GNUC__ +# define _CFFI_UNUSED_FN __attribute__((unused)) +#else +# define _CFFI_UNUSED_FN /* nothing */ +#endif + +#ifdef __cplusplus +# ifndef _Bool + typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */ +# endif +#endif + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? 
\ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ + PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _cffi_to_c_wchar3216_t \ + ((int(*)(PyObject *))_cffi_exports[26]) +#define _cffi_from_c_wchar3216_t \ + ((PyObject *(*)(int))_cffi_exports[27]) +#define _CFFI_NUM_EXPORTS 28 + +struct _cffi_ctypedescr; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (struct _cffi_ctypedescr *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = 
PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + + +#ifdef HAVE_WCHAR_H +typedef wchar_t _cffi_wchar_t; +#else +typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ +#endif + +_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 2) + return (uint16_t)_cffi_to_c_wchar_t(o); + else + return (uint16_t)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) +{ + if (sizeof(_cffi_wchar_t) == 2) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 4) + return (int)_cffi_to_c_wchar_t(o); + else + return (int)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(int x) +{ + if (sizeof(_cffi_wchar_t) == 4) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t(x); +} + + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \ + _CFFI__UNKNOWN_FLOAT_PRIM) + +#define _cffi_check_int(got, got_nonpos, expected) \ + ((got_nonpos) == (expected <= 0) && \ + (got) == (unsigned long long)expected) + +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + +#ifdef __cplusplus +} +#endif diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/_embedding.h b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/_embedding.h new file mode 100644 index 0000000..30842c1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/_embedding.h @@ -0,0 +1,518 @@ + +/***** Support code for embedding *****/ + +#ifdef __cplusplus +extern "C" { +#endif + + +#if defined(_WIN32) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. 
However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. */ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + +#include "_cffi_errors.h" + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. 
+ */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + PyObject *builtins; + + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + builtins = PyEval_GetBuiltins(); + if (builtins == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *ecap; + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + ecap = _cffi_start_error_capture(); + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString( + "Failed to initialize the Python-CFFI embedding logic:\n\n", f); + } + + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.12.3" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + _cffi_stop_error_capture(ecap); + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + (What it really does used to be completely different in Python 2 + and Python 3, with the Python 2 solution avoiding the spin-lock + around the Py_InitializeEx() call. However, after recent changes + to CPython 2.7 (issue #358) it no longer works. So we use the + Python 3 solution everywhere.) 
+ + This initializes Python by calling Py_InitializeEx(). + Important: this must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. + + In Python < 3.8, we choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + In Python >= 3.8, this string array is no longer writable, so + instead we pick PyCapsuleType.tp_version_tag. We can't change + Python < 3.8 because someone might use a mixture of cffi + embedded modules, some of which were compiled before this file + changed. + */ + +#ifdef WITH_THREAD +# if PY_VERSION_HEX < 0x03080000 + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value, *locked_value; + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = old_value + 1; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# else + int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; + int old_value, locked_value; + assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = -42; + if (old_value == 0) { + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value == locked_value); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# endif +#endif + + /* call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, locked_value, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + (void(*)(const void *[]))_CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. 
This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. 
Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier + +#ifdef __cplusplus +} +#endif diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/api.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/api.py new file mode 100644 index 0000000..32fe620 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/api.py @@ -0,0 +1,961 @@ +import sys, types +from .lock import allocate_lock +from .error import CDefError +from . import model + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +_unspecified = object() + + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. + + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + if backend.__version__ != __version__: + # bad version! Try to be as explicit as possible. + if hasattr(backend, '__file__'): + # CPython + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( + __version__, __file__, + backend.__version__, backend.__file__)) + else: + # PyPy + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( + __version__, __file__, backend.__version__)) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + from . 
import cparser + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + self._typecache = model.get_typecache(backend) + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in list(backend.__dict__): + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + self.buffer = backend.buffer + + def cdef(self, csource, override=False, packed=False, pack=None): + """Parse the given C source. This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. + If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. + Alternatively, 'pack' can be a small integer, and requests for + alignment greater than that are ignored (pack=1 is equivalent to + packed=True). + """ + self._cdef(csource, override=override, packed=packed, pack=pack) + + def embedding_api(self, csource, packed=False, pack=None): + self._cdef(csource, packed=packed, pack=pack, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. + """ + assert isinstance(name, basestring) or name is None + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def dlclose(self, lib): + """Close a library obtained with ffi.dlopen(). After this call, + access to functions or variables from the library will fail + (possibly with a segmentation fault). 
+ """ + type(lib).__cffi_close__(lib) + + def _typeof_locked(self, cdecl): + # call me with the lock! + key = cdecl + if key in self._parsed_types: + return self._parsed_types[key] + # + if not isinstance(cdecl, str): # unicode, on Python 2 + cdecl = cdecl.encode('ascii') + # + type = self._parser.parse_type(cdecl) + really_a_function_type = type.is_raw_function + if really_a_function_type: + type = type.as_function_pointer() + btype = self._get_cached_btype(type) + result = btype, really_a_function_type + self._parsed_types[key] = result + return result + + def _typeof(self, cdecl, consider_function_as_funcptr=False): + # string -> ctype object + try: + result = self._parsed_types[cdecl] + except KeyError: + with self._lock: + result = self._typeof_locked(cdecl) + # + btype, really_a_function_type = result + if really_a_function_type and not consider_function_as_funcptr: + raise CDefError("the type %r is a function type, not a " + "pointer-to-function type" % (cdecl,)) + return btype + + def typeof(self, cdecl): + """Parse the C type given as a string and return the + corresponding object. + It can also be used on 'cdata' instance to get its C type. + """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. + """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. 
This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. + """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. + If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def unpack(self, cdata, length): + """Unpack an array of C data of the given length, + returning a Python string/unicode/list. + + If 'cdata' is a pointer to 'char', returns a byte string. + It does not stop at the first null. This is equivalent to: + ffi.buffer(cdata, length)[:] + + If 'cdata' is a pointer to 'wchar_t', returns a unicode string. + 'length' is measured in wchar_t's; it is not the size in bytes. + + If 'cdata' is a pointer to anything else, returns a list of + 'length' items. This is a faster equivalent to: + [cdata[i] for i in range(length)] + """ + return self._backend.unpack(cdata, length) + + #def buffer(self, cdata, size=-1): + # """Return a read-write buffer object that references the raw C data + # pointed to by the given 'cdata'. The 'cdata' must be a pointer or + # an array. Can be passed to functions expecting a buffer, or directly + # manipulated with: + # + # buf[:] get a copy of it in a regular string, or + # buf[idx] as a single character + # buf[:] = ... + # buf[idx] = ... 
change the content + # """ + # note that 'buffer' is a type, set on this instance by __init__ + + def from_buffer(self, cdecl, python_buffer=_unspecified, + require_writable=False): + """Return a cdata of the given type pointing to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types + str or unicode (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + + The first argument is optional and default to 'char[]'. + """ + if python_buffer is _unspecified: + cdecl, python_buffer = self.BCharA, cdecl + elif isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.from_buffer(cdecl, python_buffer, + require_writable) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. + 'dest' can be any cdata ptr or array, or a writable Python buffer + object. The size to copy, 'n', is always measured in bytes. + + Unlike other methods, this one supports all Python buffer including + byte strings and bytearrays---but it still does not support + non-contiguous buffers. + """ + return self._backend.memmove(dest, src, n) + + def callback(self, cdecl, python_callable=None, error=None, onerror=None): + """Return a callback object or a decorator making such a + callback object. 'cdecl' must name a C function pointer type. + The callback invokes the specified 'python_callable' (which may + be provided either directly or via a decorator). Important: the + callback object must be manually kept alive for as long as the + callback may be invoked from the C level. + """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, + error, onerror) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a object. If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor, size=0): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + + The optional 'size' gives an estimate of the size, used to + trigger the garbage collection more eagerly. So far only used + on PyPy. 
+        It tells the GC that the returned object keeps alive
+        roughly 'size' bytes of external memory.
+        """
+        return self._backend.gcp(cdata, destructor, size)
+
+    def _get_cached_btype(self, type):
+        assert self._lock.acquire(False) is False
+        # call me with the lock!
+        try:
+            BType = self._cached_btypes[type]
+        except KeyError:
+            finishlist = []
+            BType = type.get_cached_btype(self, finishlist)
+            for type in finishlist:
+                type.finish_backend_type(self, finishlist)
+        return BType
+
+    def verify(self, source='', tmpdir=None, **kwargs):
+        """Verify that the current ffi signatures compile on this
+        machine, and return a dynamic library object. The dynamic
+        library can be used to call functions and access global
+        variables declared in this 'ffi'. The library is compiled
+        by the C compiler: it gives you C-level API compatibility
+        (including calling macros). This is unlike 'ffi.dlopen()',
+        which requires binary compatibility in the signatures.
+        """
+        from .verifier import Verifier, _caller_dir_pycache
+        #
+        # If set_unicode(True) was called, insert the UNICODE and
+        # _UNICODE macro declarations
+        if self._windows_unicode:
+            self._apply_windows_unicode(kwargs)
+        #
+        # Set the tmpdir here, and not in Verifier.__init__: it picks
+        # up the caller's directory, which we want to be the caller of
+        # ffi.verify(), as opposed to the caller of Verifier().
+        tmpdir = tmpdir or _caller_dir_pycache()
+        #
+        # Make a Verifier() and use it to load the library.
+        self.verifier = Verifier(self, source, tmpdir, **kwargs)
+        lib = self.verifier.load_library()
+        #
+        # Save the loaded library for keep-alive purposes, even
+        # if the caller doesn't keep it alive itself (it should).
+        self._libraries.append(lib)
+        return lib
+
+    def _get_errno(self):
+        return self._backend.get_errno()
+    def _set_errno(self, errno):
+        self._backend.set_errno(errno)
+    errno = property(_get_errno, _set_errno, None,
+                     "the value of 'errno' from/to the C calls")
+
+    def getwinerror(self, code=-1):
+        return self._backend.getwinerror(code)
+
+    def _pointer_to(self, ctype):
+        with self._lock:
+            return model.pointer_cache(self, ctype)
+
+    def addressof(self, cdata, *fields_or_indexes):
+        """Return the address of a <cdata 'struct-or-union'>.
+        If 'fields_or_indexes' are given, returns the address of that
+        field or array item in the structure or array, recursively in
+        case of nested structures.
+        """
+        try:
+            ctype = self._backend.typeof(cdata)
+        except TypeError:
+            if '__addressof__' in type(cdata).__dict__:
+                return type(cdata).__addressof__(cdata, *fields_or_indexes)
+            raise
+        if fields_or_indexes:
+            ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
+        else:
+            if ctype.kind == "pointer":
+                raise TypeError("addressof(pointer)")
+            offset = 0
+        ctypeptr = self._pointer_to(ctype)
+        return self._backend.rawaddressof(ctypeptr, cdata, offset)
+
+    def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
+        ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
+        for field1 in fields_or_indexes:
+            ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
+            offset += offset1
+        return ctype, offset
+
+    def include(self, ffi_to_include):
+        """Includes the typedefs, structs, unions and enums defined
+        in another FFI instance. Usage is similar to a #include in C,
+        where a part of the program might include types defined in
+        another part for its own usage.
Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. + """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def release(self, x): + self._backend.release(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. + """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + if sys.version_info < (3,): + pythonlib = "pypy-c" + else: + pythonlib = "pypy3-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
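+                # (on such builds sys.prefix points at the source tree, so
+                # this picks up the freshly built libpypy-c from pypy/goal)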
+ if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + import os + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + if os.sep in module_name or (os.altsep and os.altsep in module_name): + raise ValueError("'module_name' must not contain '/': use a dotted " + "name to make a 'package.module' location") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, + source_extension='.c', **kwds): + from . import pkgconfig + if not isinstance(pkgconfig_libs, list): + raise TypeError("the pkgconfig_libs argument must be a list " + "of package names") + kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs) + pkgconfig.merge_flags(kwds, kwds2) + self.set_source(module_name, source, source_extension, **kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from distutils.dir_util import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, 
source, + c_file=filename, call_c_compiler=False, **kwds) + + def compile(self, tmpdir='.', verbose=0, target=None, debug=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + return recompile(self, module_name, source, tmpdir=tmpdir, + target=target, source_extension=source_extension, + compiler_verbose=verbose, debug=debug, **kwds) + + def init_once(self, func, tag): + # Read _init_once_cache[tag], which is either (False, lock) if + # we're calling the function now in some thread, or (True, result). + # Don't call setdefault() in most cases, to avoid allocating and + # immediately freeing a lock; but still use setdefaut() to avoid + # races. + try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. + result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. 
+ This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + import os + if name is None: + if sys.platform != "win32": + return backend.load_library(None, flags) + name = "c" # Windows: load_library(None) fails, but this works + # on Python 2 (backward compatibility hack only) + first_error = None + if '.' in name or '/' in name or os.sep in name: + try: + return backend.load_library(name, flags) + except OSError as e: + first_error = e + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): + raise OSError("dlopen(None) cannot work on Windows for Python 3 " + "(see http://bugs.python.org/issue23606)") + msg = ("ctypes.util.find_library() did not manage " + "to locate a library called %r" % (name,)) + if first_error is not None: + msg = "%s. Additionally, %s" % (first_error, msg) + raise OSError(msg) + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + value = backendlib.load_function(BType, name) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def addressof_var(name): + try: + return addr_variables[name] + except KeyError: + with ffi._lock: + if name not in addr_variables: + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + if BType.kind != 'array': + BType = model.pointer_cache(ffi, BType) + p = backendlib.load_function(BType, name) + addr_variables[name] = p + return addr_variables[name] + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + addr_variables = {} + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, 
accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + def __addressof__(self, name): + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + make_accessor(name) + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + raise AttributeError("cffi library has no function or " + "global variable named '%s'" % (name,)) + def __cffi_close__(self): + backendlib.close_lib() + self.__dict__.clear() + # + if libname is not None: + try: + if not isinstance(libname, str): # unicode, on Python 2 + libname = libname.encode('utf-8') + FFILibrary.__name__ = 'FFILibrary_%s' % libname + except UnicodeError: + pass + library = FFILibrary() + return library, library.__dict__ + +def _builtin_function_type(func): + # a hack to make at least ffi.typeof(builtin_function) work, + # if the builtin function was obtained by 'vengine_cpy'. + import sys + try: + module = sys.modules[func.__module__] + ffi = module._cffi_original_ffi + types_of_builtin_funcs = module._cffi_types_of_builtin_funcs + tp = types_of_builtin_funcs[func] + except (KeyError, AttributeError, TypeError): + return None + else: + with ffi._lock: + return ffi._get_cached_btype(tp) diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/backend_ctypes.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/backend_ctypes.py new file mode 100644 index 0000000..679ae05 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/backend_ctypes.py @@ -0,0 +1,1121 @@ +import ctypes, ctypes.util, operator, sys +from . 
import model + +if sys.version_info < (3,): + bytechr = chr +else: + unicode = str + long = int + xrange = range + bytechr = lambda num: bytes([num]) + +class CTypesType(type): + pass + +class CTypesData(object): + __metaclass__ = CTypesType + __slots__ = ['__weakref__'] + __name__ = '' + + def __init__(self, *args): + raise TypeError("cannot instantiate %r" % (self.__class__,)) + + @classmethod + def _newp(cls, init): + raise TypeError("expected a pointer or array ctype, got '%s'" + % (cls._get_c_name(),)) + + @staticmethod + def _to_ctypes(value): + raise TypeError + + @classmethod + def _arg_to_ctypes(cls, *value): + try: + ctype = cls._ctype + except AttributeError: + raise TypeError("cannot create an instance of %r" % (cls,)) + if value: + res = cls._to_ctypes(*value) + if not isinstance(res, ctype): + res = cls._ctype(res) + else: + res = cls._ctype() + return res + + @classmethod + def _create_ctype_obj(cls, init): + if init is None: + return cls._arg_to_ctypes() + else: + return cls._arg_to_ctypes(init) + + @staticmethod + def _from_ctypes(ctypes_value): + raise TypeError + + @classmethod + def _get_c_name(cls, replace_with=''): + return cls._reftypename.replace(' &', replace_with) + + @classmethod + def _fix_class(cls): + cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__module__ = 'ffi' + + def _get_own_repr(self): + raise NotImplementedError + + def _addr_repr(self, address): + if address == 0: + return 'NULL' + else: + if address < 0: + address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) + return '0x%x' % address + + def __repr__(self, c_name=None): + own = self._get_own_repr() + return '' % (c_name or self._get_c_name(), own) + + def _convert_to_address(self, BClass): + if BClass is None: + raise TypeError("cannot convert %r to an address" % ( + self._get_c_name(),)) + else: + raise TypeError("cannot convert %r to %r" % ( + self._get_c_name(), BClass._get_c_name())) + + @classmethod + def _get_size(cls): + return ctypes.sizeof(cls._ctype) + + def _get_size_of_instance(self): + return ctypes.sizeof(self._ctype) + + @classmethod + def _cast_from(cls, source): + raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) + + def _cast_to_integer(self): + return self._convert_to_address(None) + + @classmethod + def _alignment(cls): + return ctypes.alignment(cls._ctype) + + def __iter__(self): + raise TypeError("cdata %r does not support iteration" % ( + self._get_c_name()),) + + def _make_cmp(name): + cmpfunc = getattr(operator, name) + def cmp(self, other): + v_is_ptr = not isinstance(self, CTypesGenericPrimitive) + w_is_ptr = (isinstance(other, CTypesData) and + not isinstance(other, CTypesGenericPrimitive)) + if v_is_ptr and w_is_ptr: + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + elif v_is_ptr or w_is_ptr: + return NotImplemented + else: + if isinstance(self, CTypesGenericPrimitive): + self = self._value + if isinstance(other, CTypesGenericPrimitive): + other = other._value + return cmpfunc(self, other) + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __hash__(self): + return 
hash(self._value) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + _automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' + + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 
'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def __int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return self._value + + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, 
'__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + def __nonzero__(self): + return ord(self._value) != 0 + else: + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + @staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: + return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, 
value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + if isinstance(init, CTypesGenericArray): + if (len(init) != len(blob) or + not isinstance(init, CTypesArray)): + raise TypeError("length/type mismatch: %s" % (init,)) + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if 
BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0, + pack=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + elif pack: + struct_or_union._pack_ = pack + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, 
bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + 
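+                # Keep the ctypes callback object (self._as_ctype_ptr) and the
+                # original Python callable alive on this cdata: if either were
+                # garbage-collected, C code invoking the callback would crash.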
self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object (got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + _weakref_cache_ref = None + + def gcp(self, cdata, destructor, size=0): + if self._weakref_cache_ref is None: + import weakref + class MyRef(weakref.ref): + def __eq__(self, other): + myref = self() + return self is other or ( + myref is not None and myref is other()) + def __ne__(self, other): + return not (self == other) + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(self()) + return self._hash + self._weakref_cache_ref = {}, MyRef + weak_cache, MyRef = self._weakref_cache_ref + + if destructor is None: + try: + del 
weak_cache[MyRef(cdata)] + except KeyError: + raise TypeError("Can remove destructor only on a object " + "previously returned by ffi.gc()") + return None + + def remove(k): + cdata, destructor = weak_cache.pop(k, (None, None)) + if destructor is not None: + destructor(cdata) + + new_cdata = self.cast(self.typeof(cdata), cdata) + assert new_cdata is not cdata + weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset = BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset=None): + if isinstance(cdata, CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a ") + if offset: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def read_variable(self, BType, name): + try: + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + except AttributeError as e: + raise NotImplementedError(e) + return BType._from_ctypes(ctypes_obj) + + def write_variable(self, BType, name, value): + new_ctypes_obj = BType._to_ctypes(value) + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + ctypes.memmove(ctypes.addressof(ctypes_obj), + ctypes.addressof(new_ctypes_obj), + ctypes.sizeof(BType._ctype)) diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/cffi_opcode.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/cffi_opcode.py new file mode 100644 index 0000000..a0df98d --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/cffi_opcode.py @@ -0,0 +1,187 @@ +from .error import VerificationError + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = 
int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: + raise OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + raise VerificationError("cannot emit to Python: %r" % (self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 +OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 +PRIM_FLOATCOMPLEX = 48 +PRIM_DOUBLECOMPLEX = 49 +PRIM_CHAR16 = 50 +PRIM_CHAR32 = 51 + +_NUM_PRIM = 52 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + 'float _Complex': PRIM_FLOATCOMPLEX, + 'double _Complex': PRIM_DOUBLECOMPLEX, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'char16_t': PRIM_CHAR16, + 'char32_t': PRIM_CHAR32, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, 
+ 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/commontypes.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/commontypes.py new file mode 100644 index 0000000..8ec97c7 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/commontypes.py @@ -0,0 +1,80 @@ +import sys +from . import model +from .error import FFIError + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." 
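The opcode encoding defined in cffi_opcode.py above is compact: each entry is a 4-byte big-endian word with the OP_* code in the low byte and the argument (for example a PRIM_* index) in the upper 24 bits, exactly as format_four_bytes() and the `(self.arg << 8) | self.op` expression show. A minimal standalone sketch of that packing, for illustration only (not part of the patch; the constant values are copied from the OP_*/PRIM_* tables above):

OP_PRIMITIVE = 1          # from the OP_* table above
PRIM_INT     = 7          # from the PRIM_* table above

def pack(op, arg):
    # same packing as CffiOp: argument shifted left by 8, opcode in the low byte
    return (arg << 8) | op

def unpack(word):
    # inverse operation: low byte is the opcode, the rest is the argument
    return word & 0xFF, word >> 8

def format_four_bytes(num):
    # identical to the helper above: four escaped bytes, big-endian
    return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
        (num >> 24) & 0xFF, (num >> 16) & 0xFF,
        (num >> 8) & 0xFF, num & 0xFF)

word = pack(OP_PRIMITIVE, PRIM_INT)   # an "int" primitive
print(hex(word))                      # 0x701
print(format_four_bytes(word))        # \x00\x00\x07\x01
print(unpack(word))                   # (1, 7)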
% (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/cparser.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/cparser.py new file mode 100644 index 0000000..262fd76 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/cparser.py @@ -0,0 +1,931 @@ +from . import model +from .commontypes import COMMON_TYPES, resolve_common_type +from .error import FFIError, CDefError +try: + from . import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +def _workaround_for_static_import_finders(): + # Issue #392: packaging tools like cx_Freeze can not find these + # because pycparser uses exec dynamic import. This is an obscure + # workaround. This function is never called. + import pycparser.yacctab + import pycparser.lextab + +CDEF_SOURCE_STRING = "" +_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", + re.DOTALL | re.MULTILINE) +_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" + r"\b((?:[^\n\\]|\\.)*?)$", + re.DOTALL | re.MULTILINE) +_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") +_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") +_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") +_r_words = re.compile(r"\w+|\S") +_parser_cache = None +_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) +_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") +_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") +_r_cdecl = re.compile(r"\b__cdecl\b") +_r_extern_python = re.compile(r'\bextern\s*"' + r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.') +_r_star_const_space = re.compile( # matches "* const " + r"[*]\s*((const|volatile|restrict)\b\s*)+") +_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+" + r"\.\.\.") +_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.") + +def _get_parser(): + global _parser_cache + if _parser_cache is None: + _parser_cache = pycparser.CParser() + return _parser_cache + +def _workaround_for_old_pycparser(csource): + # Workaround for a pycparser issue (fixed between pycparser 2.10 and + # 2.14): "char*const***" gives us a wrong syntax tree, the same as + # for "char***(*const)". This means we can't tell the difference + # afterwards. 
But "char(*const(***))" gives us the right syntax + # tree. The issue only occurs if there are several stars in + # sequence with no parenthesis inbetween, just possibly qualifiers. + # Attempt to fix it by adding some parentheses in the source: each + # time we see "* const" or "* const *", we add an opening + # parenthesis before each star---the hard part is figuring out where + # to close them. + parts = [] + while True: + match = _r_star_const_space.search(csource) + if not match: + break + #print repr(''.join(parts)+csource), '=>', + parts.append(csource[:match.start()]) + parts.append('('); closing = ')' + parts.append(match.group()) # e.g. "* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... }'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _warn_for_string_literal(csource): + if '"' in csource: + import warnings + warnings.warn("String literal found in cdef() or type source. " + "String literals are ignored here, but you should " + "remove them anyway because some character sequences " + "confuse pre-parsing.") + +def _preprocess(csource): + # Remove comments. NOTE: this only work because the cdef() section + # should not contain any string literal! + csource = _r_comment.sub(' ', csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". 
+ # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. + csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) + # + # Replace `extern "Python"` with start/end markers + csource = _preprocess_extern_python(csource) + # + # Now there should not be any string literal left; warn if we get one + _warn_for_string_literal(csource) + # + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. + matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' + csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__" + csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource) + # Replace "float ..." or "double..." with "__dotdotdotfloat__" + csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. + return csource.replace('...', ' __dotdotdot__ '), macros + +def _common_type_names(csource): + # Look in the source for what looks like usages of types from the + # list of common types. A "usage" is approximated here as the + # appearance of the word, minus a "definition" of the type, which + # is the last word in a "typedef" statement. Approximative only + # but should be fine for all the common types. 
+ look_for_words = set(COMMON_TYPES) + look_for_words.add(';') + look_for_words.add(',') + look_for_words.add('(') + look_for_words.add(')') + look_for_words.add('typedef') + words_used = set() + is_typedef = False + paren = 0 + previous_word = '' + for word in _r_words.findall(csource): + if word in look_for_words: + if word == ';': + if is_typedef: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + is_typedef = False + elif word == 'typedef': + is_typedef = True + paren = 0 + elif word == '(': + paren += 1 + elif word == ')': + paren -= 1 + elif word == ',': + if is_typedef and paren == 0: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + else: # word in COMMON_TYPES + words_used.add(word) + previous_word = word + return words_used + + +class Parser(object): + + def __init__(self): + self._declarations = {} + self._included_declarations = set() + self._anonymous_counter = 0 + self._structnode2type = weakref.WeakKeyDictionary() + self._options = {} + self._int_constants = {} + self._recomplete = [] + self._uses_new_feature = None + + def _parse(self, csource): + csource, macros = _preprocess(csource) + # XXX: for more efficiency we would need to poke into the + # internals of CParser... the following registers the + # typedefs, because their presence or absence influences the + # parsing itself (but what they are typedef'ed to plays no role) + ctn = _common_type_names(csource) + typenames = [] + for name in sorted(self._declarations): + if name.startswith('typedef '): + name = name[8:] + typenames.append(name) + ctn.discard(name) + typenames += sorted(ctn) + # + csourcelines = [] + csourcelines.append('# 1 ""') + for typename in typenames: + csourcelines.append('typedef int %s;' % typename) + csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,' + ' __dotdotdot__;') + # this forces pycparser to consider the following in the file + # called from line 1 + csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,)) + csourcelines.append(csource) + fullcsource = '\n'.join(csourcelines) + if lock is not None: + lock.acquire() # pycparser is not thread-safe... + try: + ast = _get_parser().parse(fullcsource) + except pycparser.c_parser.ParseError as e: + self.convert_pycparser_error(e, csource) + finally: + if lock is not None: + lock.release() + # csource will be used to find buggy source text + return ast, macros, csource + + def _convert_pycparser_error(self, e, csource): + # xxx look for ":NUM:" at the start of str(e) + # and interpret that as a line number. This will not work if + # the user gives explicit ``# NUM "FILE"`` directives. 
+ line = None + msg = str(e) + match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) + if match: + linenum = int(match.group(1), 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise CDefError(msg) + + def parse(self, csource, override=False, packed=False, pack=None, + dllexport=False): + if packed: + if packed != True: + raise ValueError("'packed' should be False or True; use " + "'pack' to give another value") + if pack: + raise ValueError("cannot give both 'pack' and 'packed'") + pack = 1 + elif pack: + if pack & (pack - 1): + raise ValueError("'pack' must be a power of two, not %r" % + (pack,)) + else: + pack = 0 + prev_options = self._options + try: + self._options = {'override': override, + 'packed': pack, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + else: + assert 0 + current_decl = None + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + current_decl = decl + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and + decl.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_ptr_type(decl) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name, partial_length_ok=True) + self._declare('typedef ' + decl.name, realtype, quals=quals) + elif decl.__class__.__name__ == 'Pragma': + pass # skip pragma, only in pycparser 2.15 + else: + raise CDefError("unexpected <%s>: this construct is valid " + "C but not valid in cdef()" % + decl.__class__.__name__, decl) + except CDefError as e: + if len(e.args) == 1: + e.args = e.args + (current_decl,) + raise + except FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if 
neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise CDefError( + 'only supports one of the following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern "Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # "void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise 
FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. 
get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def _parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise CDefError( + "%s: a function with only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we absure them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. 
+ abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. + # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". + if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. 
+ if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. + self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if '0' <= s[0] <= '9': + s = s.rstrip('uUlL') + try: + if s.startswith('0'): + return int(s, 8) + else: + return int(s, 10) + except ValueError: + if len(s) > 1: + if s.lower()[0:2] == '0x': + return int(s, 16) + elif s.lower()[0:2] == '0b': + return int(s, 2) + raise CDefError("invalid constant %r" % (s,)) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise CDefError("invalid constant %r" % (s,)) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + if partial_length_ok: + self._partial_length = True + return '...' 
+ raise FFIError(":%d: unsupported '[...]' here, cannot derive " + "the actual array length in this context" + % exprnode.coord.line) + # + if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and + exprnode.op == '+'): + return (self._parse_constant(exprnode.left) + + self._parse_constant(exprnode.right)) + # + if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and + exprnode.op == '-'): + return (self._parse_constant(exprnode.left) - + self._parse_constant(exprnode.right)) + # + raise FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + if typenames == ['__dotdotdot__']: + return model.unknown_type(decl.name) + + if typenames == ['__dotdotdotint__']: + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef int... %s'" % decl.name + return model.UnknownIntegerType(decl.name) + + if typenames == ['__dotdotdotfloat__']: + # note: not for 'long double' so far + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef float... %s'" % decl.name + return model.UnknownFloatType(decl.name) + + raise FFIError(':%d: unsupported usage of "..." in typedef' + % decl.coord.line) + + def _get_unknown_ptr_type(self, decl): + if decl.type.type.type.names == ['__dotdotdot__']: + return model.unknown_ptr_type(decl.name) + raise FFIError(':%d: unsupported usage of "..." 
in typedef' + % decl.coord.line) diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/error.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/error.py new file mode 100644 index 0000000..0a27247 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/error.py @@ -0,0 +1,31 @@ + +class FFIError(Exception): + __module__ = 'cffi' + +class CDefError(Exception): + __module__ = 'cffi' + def __str__(self): + try: + current_decl = self.args[1] + filename = current_decl.coord.file + linenum = current_decl.coord.line + prefix = '%s:%d: ' % (filename, linenum) + except (AttributeError, TypeError, IndexError): + prefix = '' + return '%s%s' % (prefix, self.args[0]) + +class VerificationError(Exception): + """ An error raised when verification fails + """ + __module__ = 'cffi' + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ + __module__ = 'cffi' + +class PkgConfigError(Exception): + """ An error raised for missing modules in pkg-config + """ + __module__ = 'cffi' diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/ffiplatform.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/ffiplatform.py new file mode 100644 index 0000000..8531346 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/ffiplatform.py @@ -0,0 +1,127 @@ +import sys, os +from .error import VerificationError + + +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + +def get_extension(srcfilename, modname, sources=(), **kwds): + _hack_at_distutils() + from distutils.core import Extension + allsources = [srcfilename] + for src in sources: + allsources.append(os.path.normpath(src)) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext, compiler_verbose=0, debug=None): + """Compile a C extension module using distutils.""" + + _hack_at_distutils() + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, ext, compiler_verbose, debug) + outputfilename = os.path.abspath(outputfilename) + finally: + # workaround for a distutils bugs where some env vars can + # become longer and longer every time it is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext, compiler_verbose=0, debug=None): + # XXX compact but horrible :-( + from distutils.core import Distribution + import distutils.errors, distutils.log + # + dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() + options = dist.get_option_dict('build_ext') + if debug is None: + debug = sys.flags.debug + options['debug'] = ('ffiplatform', debug) + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + old_level = distutils.log.set_threshold(0) or 0 + try: + distutils.log.set_verbosity(compiler_verbose) + dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + finally: + distutils.log.set_threshold(old_level) + except (distutils.errors.CompileError, + distutils.errors.LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # + return soname + +try: + from os.path import samefile +except ImportError: + def 
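The cdef() preprocessing in cparser.py above is easiest to see on a tiny input. The sketch below re-uses three of the regular expressions defined near the top of that file (copied verbatim) to show how comments are stripped, "#define NAME VALUE" lines are collected, and a partial array length is rewritten to the "__dotdotdotarray__" placeholder. It is an illustration only, with a made-up cdef string, and it skips the other rewriting passes (__stdcall, extern "Python", enums, "int ..."):

import re

# regexes copied from cparser.py above
_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
                        re.DOTALL | re.MULTILINE)
_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
                       r"\b((?:[^\n\\]|\\.)*?)$",
                       re.DOTALL | re.MULTILINE)
_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")

csource = """
// flags for frobnicate()
#define FROB_FLAG 0x10
int frobnicate(int values[...]);
"""

csource = _r_comment.sub(' ', csource)                # drop // and /* */ comments
macros = {m.group(1): m.group(2).strip()              # collect "#define NAME VALUE"
          for m in _r_define.finditer(csource)}
csource = _r_define.sub('', csource)                  # then remove those lines
csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)  # "[...]" placeholder

print(macros)             # {'FROB_FLAG': '0x10'}
print(csource.strip())    # int frobnicate(int values[__dotdotdotarray__]);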
samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = path + names = [] + while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() + +def _hack_at_distutils(): + # Windows-only workaround for some configurations: see + # https://bugs.python.org/issue23246 (Python 2.7 with + # a specific MS compiler suite download) + if sys.platform == "win32": + try: + import setuptools # for side-effects, patches distutils + except ImportError: + pass diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/lock.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/lock.py new file mode 100644 index 0000000..db91b71 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/model.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/model.py new file mode 100644 index 0000000..5f1b0d2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/model.py @@ -0,0 +1,614 @@ +import types +import weakref + +from .lock import allocate_lock +from .error import CDefError, VerificationError, VerificationMissing + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. 
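One detail of ffiplatform.py above worth calling out is flatten(): judging from its TypeError message, it serialises the keyword arguments passed to verify() into a canonical, order-independent string (dict keys are sorted before writing). A reduced Python-3-only rendering of the same routine with a small usage example, as an illustration:

import io

def _flatten(x, f):
    # same shape as ffiplatform._flatten above, Python 3 branch only
    if isinstance(x, str):
        f.write('%ds%s' % (len(x), x))        # length-prefixed string
    elif isinstance(x, dict):
        keys = sorted(x.keys())               # sorted keys -> order-independent result
        f.write('%dd' % len(keys))
        for key in keys:
            _flatten(key, f)
            _flatten(x[key], f)
    elif isinstance(x, (list, tuple)):
        f.write('%dl' % len(x))
        for value in x:
            _flatten(value, f)
    elif isinstance(x, int):
        f.write('%di' % (x,))
    else:
        raise TypeError("unsupported object %r" % (x,))

def flatten(x):
    f = io.StringIO()
    _flatten(x, f)
    return f.getvalue()

# both calls print the same string even though the dicts are ordered differently:
# 2d12sinclude_dirs1l12s/usr/include9slibraries1l1sm
print(flatten({'libraries': ['m'], 'include_dirs': ['/usr/include']}))
print(flatten({'include_dirs': ['/usr/include'], 'libraries': ['m']}))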
+ # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... :-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + def is_complex_type(self): + return False + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + 'float _Complex': 'j', + 'double _Complex': 'j', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'char16_t': 'c', + 'char32_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + 
return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + def is_complex_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'j' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + _attrs_ = ('name',) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. 
+ _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + raise CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + +class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = qualify(quals, " *&") + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + brackets = '&[]' + elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + self.item.c_name_with_marker.replace('&', brackets)) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length == '...': + raise CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = 0 + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + 
self.build_c_name_with_marker() + + def anonymous_struct_fields(self): + if self.fldtypes is not None: + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + yield type + + def enumfields(self, expand_anonymous_struct_union=True): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if (name == '' and isinstance(type, StructOrUnion) + and expand_anonymous_struct_union): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. + names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + extra_flags = () + if self.packed: + if self.packed == 1: + extra_flags = (8,) # SF_PACKED + else: + extra_flags = (0, self.packed) + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, *extra_flags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length == '...': + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" 
% ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + try: + # XXX! The goal is to ensure that the warnings.warn() + # will not suppress the warning. We want to get it + # several times if we reach this point several times. 
+ __warningregistry__.clear() + except NameError: + pass + warnings.warn("%r has no values explicitly defined; " + "guessing that it is equivalent to 'unsigned int'" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, name) + + +global_lock = allocate_lock() +_typecache_cffi_backend = weakref.WeakValueDictionary() + +def get_typecache(backend): + # returns _typecache_cffi_backend if backend is the _cffi_backend + # module, or type(backend).__typecache if backend is an instance of + # CTypesBackend (or some FakeBackend class during tests) + if isinstance(backend, types.ModuleType): + return _typecache_cffi_backend + with global_lock: + if not hasattr(type(backend), '__typecache'): + type(backend).__typecache = weakref.WeakValueDictionary() + return type(backend).__typecache + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._typecache[key] + except KeyError: + pass + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/parse_c_type.h b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/parse_c_type.h new file mode 100644 index 0000000..84e4ef8 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/parse_c_type.h @@ -0,0 +1,181 @@ + +/* This part is from file 'cffi/parse_c_type.h'. It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). 
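The least obvious piece of model.py above is EnumType.build_baseinttype(): it picks the narrowest of int/long (or their unsigned variants) that can hold every enumerator, falling back to a warning and 'unsigned int' when no values are known. A standalone sketch of that selection, with ffi.sizeof() replaced by assumed sizes (4-byte int, 8-byte long, as on the linux-x86_64 build this egg targets) and returning a type name instead of a backend type:

def guess_enum_base_type(enumvalues, size_int=4, size_long=8):
    # mirrors EnumType.build_baseinttype() above; the sizes are assumptions,
    # the real code asks the backend via ffi.sizeof()
    if enumvalues:
        smallest, largest = min(enumvalues), max(enumvalues)
    else:
        smallest = largest = 0      # the real code also emits a warning here
    if smallest < 0:                # needs a signed base type
        sign, candidates = 1, (('int', size_int), ('long', size_long))
    else:
        sign, candidates = 0, (('unsigned int', size_int),
                               ('unsigned long', size_long))
    for name, size in candidates:
        if (smallest >= ((-1) << (8 * size - 1)) and
                largest < (1 << (8 * size - sign))):
            return name
    raise OverflowError("enum values don't all fit 'long' or 'unsigned long'")

print(guess_enum_base_type([0, 1, 2]))        # unsigned int
print(guess_enum_base_type([-1, 1 << 40]))    # long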
*/ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 +#define _CFFI_PRIM_FLOATCOMPLEX 48 +#define _CFFI_PRIM_DOUBLECOMPLEX 49 +#define _CFFI_PRIM_CHAR16 50 +#define _CFFI_PRIM_CHAR32 51 + +#define _CFFI__NUM_PRIM 52 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define 
_CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +struct _cffi_externpy_s { + const char *name; + size_t size_of_result; + void *reserved1, *reserved2; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/pkgconfig.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/pkgconfig.py new file mode 100644 index 0000000..5c93f15 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/pkgconfig.py @@ -0,0 +1,121 @@ +# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi +import sys, os, subprocess + +from .error import PkgConfigError + + +def merge_flags(cfg1, cfg2): + """Merge values from cffi config flags cfg2 to cf1 + + Example: + merge_flags({"libraries": ["one"]}, {"libraries": ["two"]}) + {"libraries": ["one", "two"]} + """ + for key, value in cfg2.items(): + if key not in cfg1: + cfg1[key] = value + else: + if not isinstance(cfg1[key], list): + raise TypeError("cfg1[%r] should be a list of strings" % (key,)) + if not isinstance(value, list): + raise TypeError("cfg2[%r] should be a list of strings" % (key,)) + cfg1[key].extend(value) + return cfg1 + + +def call(libname, flag, encoding=sys.getfilesystemencoding()): + """Calls pkg-config and returns the output if found + """ + a = ["pkg-config", "--print-errors"] + a.append(flag) + a.append(libname) + try: + pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError as e: + raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),)) + + bout, berr = pc.communicate() + if pc.returncode != 0: + try: + berr = berr.decode(encoding) + except Exception: + pass + raise PkgConfigError(berr.strip()) + + if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x + try: + bout = bout.decode(encoding) + except UnicodeDecodeError: + raise PkgConfigError("pkg-config %s %s returned bytes that cannot " + "be decoded with encoding %r:\n%r" % + (flag, 
libname, encoding, bout)) + + if os.altsep != '\\' and '\\' in bout: + raise PkgConfigError("pkg-config %s %s returned an unsupported " + "backslash-escaped output:\n%r" % + (flag, libname, bout)) + return bout + + +def flags_from_pkgconfig(libs): + r"""Return compiler line flags for FFI.set_source based on pkg-config output + + Usage + ... + ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) + + If pkg-config is installed on build machine, then arguments include_dirs, + library_dirs, libraries, define_macros, extra_compile_args and + extra_link_args are extended with an output of pkg-config for libfoo and + libbar. + + Raises PkgConfigError in case the pkg-config call fails. + """ + + def get_include_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-I")] + + def get_library_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-L")] + + def get_libraries(string): + return [x[2:] for x in string.split() if x.startswith("-l")] + + # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils + def get_macros(string): + def _macro(x): + x = x[2:] # drop "-D" + if '=' in x: + return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") + else: + return (x, None) # "-Dfoo" => ("foo", None) + return [_macro(x) for x in string.split() if x.startswith("-D")] + + def get_other_cflags(string): + return [x for x in string.split() if not x.startswith("-I") and + not x.startswith("-D")] + + def get_other_libs(string): + return [x for x in string.split() if not x.startswith("-L") and + not x.startswith("-l")] + + # return kwargs for given libname + def kwargs(libname): + fse = sys.getfilesystemencoding() + all_cflags = call(libname, "--cflags") + all_libs = call(libname, "--libs") + return { + "include_dirs": get_include_dirs(all_cflags), + "library_dirs": get_library_dirs(all_libs), + "libraries": get_libraries(all_libs), + "define_macros": get_macros(all_cflags), + "extra_compile_args": get_other_cflags(all_cflags), + "extra_link_args": get_other_libs(all_libs), + } + + # merge all arguments together + ret = {} + for libname in libs: + lib_flags = kwargs(libname) + merge_flags(ret, lib_flags) + return ret diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/recompiler.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/recompiler.py new file mode 100644 index 0000000..20e912b --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/recompiler.py @@ -0,0 +1,1542 @@ +import os, sys, io +from . 
import ffiplatform, model +from .error import VerificationError +from .cffi_opcode import * + +VERSION_BASE = 0x2601 +VERSION_EMBEDDED = 0x2701 +VERSION_CHAR16CHAR32 = 0x2801 + + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): + return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# ____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, 
module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + self._version = VERSION_BASE + + def needs_version(self, ver): + self._version = max(self._version, ver) + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in tp.enumfields(): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. 
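+        # Roughly: each name in ALL_STEPS above feeds one of the
+        # "struct _cffi_xxx_s" arrays declared in parse_c_type.h, e.g. the
+        # "typename" entries end up in _cffi_typenames[] and the
+        # "struct_union" entries in _cffi_struct_unions[].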
+ self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') + self._print_string_literal_in_array(self.ffi._embedding) + prnt('0 };') + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + i = lines.index('#include "_cffi_errors.h"\n') + lines[i:i+1] = self._rel_readlines('_cffi_errors.h') + prnt(''.join(lines)) + self.needs_version(VERSION_EMBEDDED) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
+ self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if self._num_externpy: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)0x%x;' % self._version) + prnt(' p[1] = &_cffi_type_context;') + prnt('#if PY_MAJOR_VERSION >= 3') + prnt(' return NULL;') + prnt('#endif') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + self._version = None + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for 
item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. + + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = 0x%x," % (self._version,)) + self._version = None + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would loose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + cname = tp.get_c_name('') + converter = '(%s)_cffi_to_c_%s' % (cname, + tp.name.replace(' ', '_')) + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif (isinstance(tp, model.StructOrUnionOrEnum) or + isinstance(tp, model.BasePrimitiveType)): + # a struct (not a struct pointer) as a function argument; + # or, a complex (the same code works) + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' if (datasize < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' %s = (%s)alloca((size_t)datasize);' % ( + tovar, tp.get_c_name(''))) + self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,)) + self._prnt(' if (_cffi_convert_array_from_object(' + '(char *)%s, _cffi_type(%d), %s) < 0)' % ( + tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double' and not tp.is_complex_type(): + cname = tp.name.replace(' ', '_') + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + return '_cffi_from_c_%s(%s)' % (cname, var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + 
tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _typedef_type(self, tp, name): + return self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + for type in tp.args: + self._extra_local_variables(type, localvars) + for decl in localvars: + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), 
len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' return %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + else: + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. We also do that for + # complex args and return type. + def need_indirection(type): + return (isinstance(type, model.StructOrUnion) or + (isinstance(type, model.PrimitiveType) and + type.is_complex_type())) + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if need_indirection(type): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if need_indirection(tp_result): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field 
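+    # Roughly: for a cdef like "struct s { int data[...]; };", _field_type()
+    # above replaces the '...' length with the C expression
+    # _cffi_array_len(((struct s *)0)->data), so the actual length is
+    # obtained from the C compiler instead of from the cdef.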
+ + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + if self.target_is_python: + # also requires nested anon struct/unions in ABI mode, recursively + for fldtype in tp.anonymous_struct_fields(): + self._struct_collecttype(fldtype) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) + while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or any(tp.anonymous_struct_fields()): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + if tp.packed > 1: + raise NotImplementedError( + "%r is declared with 'pack=%r'; only 0 or 1 are " + "supported in API mode (try to use \"...;\", which " + "does not require a 'pack' declaration)" % + (tp, tp.packed)) + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + expand_anonymous_struct_union = not self.target_is_python + enumfields = list(tp.enumfields(expand_anonymous_struct_union)) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)0)->%s) - (char *)0' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + 
FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not declared in cdef())" % location) + + def _add_missing_struct_unions(self): + # not very nice, but some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. + lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
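+    # Roughly: for an integer constant FOO, _generate_cpy_const() below emits
+    # a C helper "static int _cffi_const_FOO(unsigned long long *o)" that
+    # stores the value through *o and returns flags recording whether it is
+    # negative and, when a check_value is given (macros), whether it still
+    # matches the value from the cdef.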
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s };' % (self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _print_string_literal_in_array(self, s): + prnt = self._prnt + prnt('// # NB. this is not a string because of a size limit in MSVC') + for line in s.splitlines(True): + prnt(('// ' + line).rstrip()) + printed_line = '' + for c in line: + if len(printed_line) >= 76: + prnt(printed_line) + printed_line = '' + printed_line += '%d,' % (ord(c),) + prnt(printed_line) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = 
_emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + if compiler_verbose: + if tmpdir == '.': + msg = 'the current directory is' + else: + msg = 'setting the current directory to' + print('%s %r' % (msg, os.path.abspath(tmpdir))) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/setuptools_ext.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/setuptools_ext.py new file mode 100644 index 0000000..df5a518 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/setuptools_ext.py @@ -0,0 +1,217 @@ +import os +import sys + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from distutils.errors import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for 
Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + +def _set_py_limited_api(Extension, kwds): + """ + Add py_limited_api to kwds if setuptools >= 26 is in use. + Do not alter the setting if it already exists. + Setuptools takes care of ignoring the flag on Python 2 and PyPy. + + CPython itself should ignore the flag in a debugging version + (by not listing .abi3.so in the extensions it supports), but + it doesn't so far, creating troubles. That's why we check + for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent + of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) + + On Windows, with CPython <= 3.4, it's better not to use py_limited_api + because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. + For now we'll skip py_limited_api on all Windows versions to avoid an + inconsistent mess. + """ + if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') + and sys.platform != 'win32'): + import setuptools + try: + setuptools_major_version = int(setuptools.__version__.partition('.')[0]) + if setuptools_major_version >= 26: + kwds['py_limited_api'] = True + except ValueError: # certain development versions of setuptools + # If we don't know the version number of setuptools, we + # try to set 'py_limited_api' anyway. At worst, we get a + # warning. + kwds['py_limited_api'] = True + return kwds + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + from distutils.core import Extension + # We are a setuptools extension. Need this build_ext for py_limited_api. 
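For orientation, add_cffi_module() above is the handler behind the cffi_modules setup keyword: it exec's the build script named in the 'path/build.py:ffi_variable' spec and routes the resulting FFI object to _add_c_module or _add_py_module. A minimal sketch of the calling side, assuming hypothetical package and file names:

    # setup.py (illustrative; names are placeholders)
    from setuptools import setup

    setup(
        name="example_pkg",
        setup_requires=["cffi>=1.0.0"],
        install_requires=["cffi>=1.0.0"],
        cffi_modules=["example_build.py:ffibuilder"],   # "path/build.py:ffi_variable"
    )

    # example_build.py -- the script that add_cffi_module() loads with execfile()
    import cffi

    ffibuilder = cffi.FFI()
    ffibuilder.cdef("int add(int, int);")
    ffibuilder.set_source("_example_cffi",
                          "static int add(int a, int b) { return a + b; }")

    if __name__ == "__main__":
        ffibuilder.compile(verbose=True)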
+ from setuptools.command.build_ext import build_ext + from distutils.dir_util import mkpath + from distutils import log + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + kwds = _set_py_limited_api(Extension, kwds) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. + if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. + + +def _add_py_module(dist, ffi, module_name): + from distutils.dir_util import mkpath + from setuptools.command.build_py import build_py + from setuptools.command.build_ext import build_ext + from distutils import log + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + def get_source_files(self): + # This is called from 'setup.py sdist' only. Exclude + # the generate .py module in this case. + saved_py_modules = self.py_modules + try: + if saved_py_modules: + self.py_modules = [m for m in saved_py_modules + if m != module_name] + return base_class.get_source_files(self) + finally: + self.py_modules = saved_py_modules + dist.cmdclass['build_py'] = build_py_make_mod + + # distutils and setuptools have no notion I could find of a + # generated python module. If we don't add module_name to + # dist.py_modules, then things mostly work but there are some + # combination of options (--root and --record) that will miss + # the module. So we add it here, which gives a few apparently + # harmless warnings about not finding the file outside the + # build directory. + # Then we need to hack more in get_source_files(); see above. 
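The pre_run hook described in the comments above is discovered with getattr() on the build_ext command instance, so registering a subclass in cmdclass is enough to receive the ext and ffi objects; it is passed into make_mod() and invoked just before the C source is generated. A small sketch, assuming a hypothetical extra compile flag:

    # setup.py fragment (illustrative)
    from setuptools.command.build_ext import build_ext

    class build_ext_with_hook(build_ext):
        def pre_run(self, ext, ffi):
            # called from make_mod() right before recompiler.make_c_source()
            ext.extra_compile_args = list(ext.extra_compile_args) + ["-DEXAMPLE_FLAG=1"]

    # setup(..., cmdclass={"build_ext": build_ext_with_hook}, cffi_modules=[...])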
+ if dist.py_modules is None: + dist.py_modules = [] + dist.py_modules.append(module_name) + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/vengine_cpy.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/vengine_cpy.py new file mode 100644 index 0000000..536f11f --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/vengine_cpy.py @@ -0,0 +1,1015 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, imp +from . import model +from .error import VerificationError + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. The code here relies on + # imp.find_module() locating the .so in priority. + if descr[0] not in so_suffixes: + return None + return filename + + def collect_types(self): + self._typesdict = {} + self._generate("collecttype") + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! :-) + return self._typesdict[type] + + def _do_collect_type(self, tp): + if ((not isinstance(tp, model.PrimitiveType) + or tp.name == 'long double') + and tp not in self._typesdict): + num = len(self._typesdict) + self._typesdict[tp] = num + + def write_source_to_f(self): + self.collect_types() + # + # The new module will have a _cffi_setup() function that receives + # objects from the ffi world, and that calls some setup code in + # the module. This setup code is split in several independent + # functions, e.g. one per constant. The functions are "chained" + # by ending in a tail call to each other. + # + # This is further split in two chained lists, depending on if we + # can do it at import-time or if we must wait for _cffi_setup() to + # provide us with the objects. This is needed because we + # need the values of the enum constants in order to build the + # that we may have to pass to _cffi_setup(). + # + # The following two 'chained_list_constants' items contains + # the head of these two chained lists, as a string that gives the + # call to do, if any. 
+ self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] + # + prnt = self._prnt + # first paste some standard set of lines that are mostly '#define' + prnt(cffimod_header) + prnt() + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate("decl") + # + # implement the function _cffi_setup_custom() as calling the + # head of the chained list. + self._generate_setup_custom() + prnt() + # + # produce the method table, including the entries for the + # generated Python->C function wrappers, which are done + # by generate_cpy_function_method(). + prnt('static PyMethodDef _cffi_methods[] = {') + self._generate("method") + prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') + prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') + prnt('};') + prnt() + # + # standard init. + modname = self.verifier.get_module_name() + constants = self._chained_list_constants[False] + prnt('#if PY_MAJOR_VERSION >= 3') + prnt() + prnt('static struct PyModuleDef _cffi_module_def = {') + prnt(' PyModuleDef_HEAD_INIT,') + prnt(' "%s",' % modname) + prnt(' NULL,') + prnt(' -1,') + prnt(' _cffi_methods,') + prnt(' NULL, NULL, NULL, NULL') + prnt('};') + prnt() + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = PyModule_Create(&_cffi_module_def);') + prnt(' if (lib == NULL)') + prnt(' return NULL;') + prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) + prnt(' Py_DECREF(lib);') + prnt(' return NULL;') + prnt(' }') + prnt(' return lib;') + prnt('}') + prnt() + prnt('#else') + prnt() + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) + prnt(' if (lib == NULL)') + prnt(' return;') + prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) + prnt(' return;') + prnt(' return;') + prnt('}') + prnt() + prnt('#endif') + + def load_library(self, flags=None): + # XXX review all usages of 'self' here! + # import it as a new extension module + imp.acquire_lock() + try: + if hasattr(sys, "getdlopenflags"): + previous_flags = sys.getdlopenflags() + try: + if hasattr(sys, "setdlopenflags") and flags is not None: + sys.setdlopenflags(flags) + module = imp.load_dynamic(self.verifier.get_module_name(), + self.verifier.modulefilename) + except ImportError as e: + error = "importing %r: %s" % (self.verifier.modulefilename, e) + raise VerificationError(error) + finally: + if hasattr(sys, "setdlopenflags"): + sys.setdlopenflags(previous_flags) + finally: + imp.release_lock() + # + # call loading_cpy_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + # + # the C code will need the objects. Collect them in + # order in a list. + revmapping = dict([(value, key) + for (key, value) in self._typesdict.items()]) + lst = [revmapping[i] for i in range(len(revmapping))] + lst = list(map(self.ffi._get_cached_btype, lst)) + # + # build the FFILibrary class and instance and call _cffi_setup(). + # this will set up some fields like '_cffi_types', and only then + # it will invoke the chained list of functions that will really + # build (notably) the constant objects, as if they are + # pointers, and store them as attributes on the 'library' object. 
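Everything in this deprecated engine is driven by ffi.verify(): write_source_to_f() emits the extension module whose import runs the chained _cffi_setup constants described above, and load_library() below imports it and wraps it in an FFILibrary object. For orientation, the user-facing call looks roughly like this (the math example and libraries=["m"] are illustrative):

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("double sin(double x);")
    # compiles the generated C module, imports it, and returns the FFILibrary wrapper
    lib = ffi.verify("#include <math.h>", libraries=["m"])
    assert abs(lib.sin(1.0) - 0.8414709848) < 1e-6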
+ class FFILibrary(object): + _cffi_python_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + list(self.__dict__) + library = FFILibrary() + if module._cffi_setup(lst, VerificationError, library): + import warnings + warnings.warn("reimporting %r might overwrite older definitions" + % (self.verifier.get_module_name())) + # + # finally, call the loaded_cpy_xxx() functions. This will perform + # the final adjustments, like copying the Python->C wrapper + # functions from the module to the 'library' object, and setting + # up the FFILibrary class with properties for the global C variables. + self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' if (datasize < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' %s = alloca((size_t)datasize);' % (tovar,)) + self._prnt(' memset((void *)%s, 
0, (size_t)datasize);' % (tovar,)) + self._prnt(' if (_cffi_convert_array_from_object(' + '(char *)%s, _cffi_type(%d), %s) < 0)' % ( + tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + for type in tp.args: + self._extra_local_variables(type, localvars) + for decl in localvars: + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' 
Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' return %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + else: + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
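'Anonymous' declarations here are structs or unions without a tag, reachable only through a typedef, so the typedef name is what the generated check/layout functions use. An illustrative cdef snippet:

    # no "struct <tag>" name; only the typedef name "point_t" identifies the type
    ffi.cdef("""
        typedef struct { int x, y; } point_t;
    """)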
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def 
_generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = self._enum_funcname(prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % name) + prnt(' return %s;' % self._chained_list_constants[True]) + self._chained_list_constants[True] = funcname + '(lib)' + prnt('}') + prnt() + + _generate_cpy_enum_collecttype = _generate_nothing + _generate_cpy_enum_method = _generate_nothing + + def _loading_cpy_enum(self, tp, name, module): + if tp.partial: + enumvalues = [getattr(module, enumerator) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + + def _loaded_cpy_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + _generate_cpy_macro_collecttype = _generate_nothing + _generate_cpy_macro_method = _generate_nothing + _loading_cpy_macro = _loaded_noop + _loaded_cpy_macro = _loaded_noop + + # ---------- + # global variables + + def _generate_cpy_variable_collecttype(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + else: + tp_ptr = model.PointerType(tp) + self._do_collect_type(tp_ptr) + + def _generate_cpy_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + self._generate_cpy_const(False, name, tp, vartp=tp_ptr, + size_too = (tp.length == '...')) + else: + tp_ptr = model.PointerType(tp) + self._generate_cpy_const(False, name, tp_ptr, category='var') + + _generate_cpy_variable_method = _generate_nothing + _loading_cpy_variable = _loaded_noop + + def _loaded_cpy_variable(self, tp, name, module, library): + value = getattr(library, name) + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." is forbidden + if tp.length == '...': + assert isinstance(value, tuple) + (value, size) = value + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. 
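As the comment above describes, a non-array global is re-exposed as a class-level property whose getter and setter go through ptr[0] (the code follows), so from the caller's side it behaves like a plain attribute of the library object. A rough sketch of the effect under the deprecated verify() path, with an illustrative variable name:

    ffi.cdef("int global_counter;")
    lib = ffi.verify("int global_counter = 0;")
    lib.global_counter = 41        # property setter stores into ptr[0]
    print(lib.global_counter + 1)  # property getter reads ptr[0] -> prints 42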
+ ptr = value + delattr(library, name) + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + + # ---------- + + def _generate_setup_custom(self): + prnt = self._prnt + prnt('static int _cffi_setup_custom(PyObject *lib)') + prnt('{') + prnt(' return %s;' % self._chained_list_constants[True]) + prnt('}') + +cffimod_header = r''' +#include +#include + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#if PY_MAJOR_VERSION < 3 +# undef PyCapsule_CheckExact +# undef PyCapsule_GetPointer +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) +# define PyCapsule_GetPointer(capsule, name) \ + (PyCObject_AsVoidPtr(capsule)) +#endif + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? 
(((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + 
Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + +/**********/ +''' diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/vengine_gen.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/vengine_gen.py new file mode 100644 index 0000000..a64ff64 --- /dev/null +++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/vengine_gen.py @@ -0,0 +1,675 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . import model +from .error import VerificationError + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. 
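This generic engine is selected by _locate_engine_class() (see verifier.py below) when the CPython-specific engine cannot be used, and it can also be requested explicitly through the force_generic_engine flag that Verifier accepts. An illustrative call:

    # force the dlopen-based generic engine instead of the CPython extension engine
    lib = ffi.verify("#include <math.h>", libraries=["m"], force_generic_engine=True)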
+ self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = 
model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def _generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length == '...': + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden
+            if tp.length == '...':
+                funcname = '_cffi_sizeof_%s' % (name,)
+                BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
+                function = module.load_function(BFunc, funcname)
+                size = function()
+                BItemType = self.ffi._get_cached_btype(tp.item)
+                length, rest = divmod(size, self.ffi.sizeof(BItemType))
+                if rest != 0:
+                    raise VerificationError(
+                        "bad size: %r does not seem to be an array of %s" %
+                        (name, tp.item))
+                tp = tp.resolve_length(length)
+            tp_ptr = model.PointerType(tp.item)
+            value = self._load_constant(False, tp_ptr, name, module)
+            # 'value' is a <cdata 'type *'> which we have to replace with
+            # a <cdata 'type[N]'> if the N is actually known
+            if tp.length is not None:
+                BArray = self.ffi._get_cached_btype(tp)
+                value = self.ffi.cast(BArray, value)
+            setattr(library, name, value)
+            type(library)._cffi_dir.append(name)
+            return
+        # remove ptr=<cdata 'type *'> from the library instance, and replace
+        # it by a property on the class, which reads/writes into ptr[0].
+        funcname = '_cffi_var_%s' % name
+        BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
+        function = module.load_function(BFunc, funcname)
+        ptr = function()
+        def getter(library):
+            return ptr[0]
+        def setter(library, value):
+            ptr[0] = value
+        setattr(type(library), name, property(getter, setter))
+        type(library)._cffi_dir.append(name)
+
+cffimod_header = r'''
+#include <stdio.h>
+#include <stddef.h>
+#include <stdarg.h>
+#include <errno.h>
+#include <sys/types.h>   /* XXX for ssize_t on some platforms */
+
+/* this block of #ifs should be kept exactly identical between
+   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+   and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600   /* MSVC < 2010 */
+   typedef __int8 int8_t;
+   typedef __int16 int16_t;
+   typedef __int32 int32_t;
+   typedef __int64 int64_t;
+   typedef unsigned __int8 uint8_t;
+   typedef unsigned __int16 uint16_t;
+   typedef unsigned __int32 uint32_t;
+   typedef unsigned __int64 uint64_t;
+   typedef __int8 int_least8_t;
+   typedef __int16 int_least16_t;
+   typedef __int32 int_least32_t;
+   typedef __int64 int_least64_t;
+   typedef unsigned __int8 uint_least8_t;
+   typedef unsigned __int16 uint_least16_t;
+   typedef unsigned __int32 uint_least32_t;
+   typedef unsigned __int64 uint_least64_t;
+   typedef __int8 int_fast8_t;
+   typedef __int16 int_fast16_t;
+   typedef __int32 int_fast32_t;
+   typedef __int64 int_fast64_t;
+   typedef unsigned __int8 uint_fast8_t;
+   typedef unsigned __int16 uint_fast16_t;
+   typedef unsigned __int32 uint_fast32_t;
+   typedef unsigned __int64 uint_fast64_t;
+   typedef __int64 intmax_t;
+   typedef unsigned __int64 uintmax_t;
+# else
+#  include <stdint.h>
+# endif
+# if _MSC_VER < 1800   /* MSVC < 2013 */
+#  ifndef __cplusplus
+    typedef unsigned char _Bool;
+#  endif
+# endif
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+#  include <alloca.h>
+# endif
+#endif
+'''
diff --git a/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/verifier.py b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/verifier.py
new file mode 100644
index 0000000..59b78c2
--- /dev/null
+++ b/py3/lib/python3.6/site-packages/cffi-1.12.3-py3.6-linux-x86_64.egg/cffi/verifier.py
@@ -0,0 +1,306 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
+import sys, os, binascii, shutil, io
+from . import __version_verifier_modules__
+from . 
import ffiplatform +from .error import VerificationError + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join([sys.version[:3], __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + ffiplatform._hack_at_distutils() # backward compatibility hack + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. 
+ f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: + force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + dirname = os.path.dirname(filename) + if dirname and not os.path.isdir(dirname): + os.makedirs(dirname) diff --git a/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..c0f044d --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst @@ -0,0 +1,70 @@ +Chardet: The Universal Character Encoding Detector +-------------------------------------------------- + +.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg + :alt: Build status + :target: https://travis-ci.org/chardet/chardet + +.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg + :target: https://coveralls.io/r/chardet/chardet + +.. image:: https://img.shields.io/pypi/v/chardet.svg + :target: https://warehouse.python.org/project/chardet/ + :alt: Latest version on PyPI + +.. image:: https://img.shields.io/pypi/l/chardet.svg + :alt: License + + +Detects + - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) + - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) + - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) + - EUC-KR, ISO-2022-KR (Korean) + - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) + - ISO-8859-5, windows-1251 (Bulgarian) + - ISO-8859-1, windows-1252 (Western European languages) + - ISO-8859-7, windows-1253 (Greek) + - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) + - TIS-620 (Thai) + +.. note:: + Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily + disabled until we can retrain the models. + +Requires Python 2.6, 2.7, or 3.3+. + +Installation +------------ + +Install from `PyPI `_:: + + pip install chardet + +Documentation +------------- + +For users, docs are now available at https://chardet.readthedocs.io/. + +Command-line Tool +----------------- + +chardet comes with a command-line script which reports on the encodings of one +or more files:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +About +----- + +This is a continuation of Mark Pilgrim's excellent chardet. Previously, two +versions needed to be maintained: one that supported python 2.x and one that +supported python 3.x. We've recently merged with `Ian Cordasco `_'s +`charade `_ fork, so now we have one +coherent version that works for Python 2.6+. 
+ +:maintainer: Dan Blanchard + + diff --git a/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA new file mode 100644 index 0000000..1427867 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA @@ -0,0 +1,96 @@ +Metadata-Version: 2.0 +Name: chardet +Version: 3.0.4 +Summary: Universal encoding detector for Python 2 and 3 +Home-page: https://github.com/chardet/chardet +Author: Daniel Blanchard +Author-email: dan.blanchard@gmail.com +License: LGPL +Keywords: encoding,i18n,xml +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Linguistic + +Chardet: The Universal Character Encoding Detector +-------------------------------------------------- + +.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg + :alt: Build status + :target: https://travis-ci.org/chardet/chardet + +.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg + :target: https://coveralls.io/r/chardet/chardet + +.. image:: https://img.shields.io/pypi/v/chardet.svg + :target: https://warehouse.python.org/project/chardet/ + :alt: Latest version on PyPI + +.. image:: https://img.shields.io/pypi/l/chardet.svg + :alt: License + + +Detects + - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) + - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) + - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) + - EUC-KR, ISO-2022-KR (Korean) + - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) + - ISO-8859-5, windows-1251 (Bulgarian) + - ISO-8859-1, windows-1252 (Western European languages) + - ISO-8859-7, windows-1253 (Greek) + - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) + - TIS-620 (Thai) + +.. note:: + Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily + disabled until we can retrain the models. + +Requires Python 2.6, 2.7, or 3.3+. + +Installation +------------ + +Install from `PyPI `_:: + + pip install chardet + +Documentation +------------- + +For users, docs are now available at https://chardet.readthedocs.io/. + +Command-line Tool +----------------- + +chardet comes with a command-line script which reports on the encodings of one +or more files:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +About +----- + +This is a continuation of Mark Pilgrim's excellent chardet. 
Previously, two +versions needed to be maintained: one that supported python 2.x and one that +supported python 3.x. We've recently merged with `Ian Cordasco `_'s +`charade `_ fork, so now we have one +coherent version that works for Python 2.6+. + +:maintainer: Dan Blanchard + + diff --git a/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD new file mode 100644 index 0000000..7d52691 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD @@ -0,0 +1,91 @@ +chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 +chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 +chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 +chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 +chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 +chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 +chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 +chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 +chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 +chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 +chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 +chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 +chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 +chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 +chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 +chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 +chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 +chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 +chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 +chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 +chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 +chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 +chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 +chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 +chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 +chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 +chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 +chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 +chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 +chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 +chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 +chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 +chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 +chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 +chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 +chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 
+chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 +chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 +chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 +chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +chardet/cli/chardetect.py,sha256=YBO8L4mXo0WR6_-Fjh_8QxPBoEBNqB9oNxNrdc54AQs,2738 +chardet-3.0.4.dist-info/DESCRIPTION.rst,sha256=PQ4sBsMyKFZkjC6QpmbpLn0UtCNyeb-ZqvCGEgyZMGk,2174 +chardet-3.0.4.dist-info/METADATA,sha256=RV_2I4B1Z586DL8oVO5Kp7X5bUdQ5EuKAvNoAEF8wSw,3239 +chardet-3.0.4.dist-info/RECORD,, +chardet-3.0.4.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +chardet-3.0.4.dist-info/entry_points.txt,sha256=fAMmhu5eJ-zAJ-smfqQwRClQ3-nozOCmvJ6-E8lgGJo,60 +chardet-3.0.4.dist-info/metadata.json,sha256=0htbRM18ujyGZDdfowgAqj6Hq2eQtwzwyhaEveKntgo,1375 +chardet-3.0.4.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8 +../../../bin/chardetect,sha256=eG1guLxUeysh9nIIaXeehPY8K1aROHueI8f9XLUI0Ks,269 +chardet-3.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +chardet/__pycache__/enums.cpython-36.pyc,, +chardet/__pycache__/gb2312freq.cpython-36.pyc,, +chardet/__pycache__/euctwfreq.cpython-36.pyc,, +chardet/__pycache__/big5freq.cpython-36.pyc,, +chardet/__pycache__/sjisprober.cpython-36.pyc,, +chardet/__pycache__/eucjpprober.cpython-36.pyc,, +chardet/__pycache__/charsetgroupprober.cpython-36.pyc,, +chardet/__pycache__/langgreekmodel.cpython-36.pyc,, +chardet/__pycache__/langbulgarianmodel.cpython-36.pyc,, +chardet/__pycache__/escsm.cpython-36.pyc,, +chardet/__pycache__/cp949prober.cpython-36.pyc,, +chardet/__pycache__/version.cpython-36.pyc,, +chardet/__pycache__/langturkishmodel.cpython-36.pyc,, +chardet/__pycache__/escprober.cpython-36.pyc,, +chardet/__pycache__/compat.cpython-36.pyc,, +chardet/__pycache__/latin1prober.cpython-36.pyc,, +chardet/__pycache__/sbcsgroupprober.cpython-36.pyc,, +chardet/__pycache__/gb2312prober.cpython-36.pyc,, +chardet/__pycache__/hebrewprober.cpython-36.pyc,, +chardet/__pycache__/universaldetector.cpython-36.pyc,, +chardet/__pycache__/euctwprober.cpython-36.pyc,, +chardet/__pycache__/mbcssm.cpython-36.pyc,, +chardet/__pycache__/charsetprober.cpython-36.pyc,, +chardet/__pycache__/mbcharsetprober.cpython-36.pyc,, +chardet/__pycache__/big5prober.cpython-36.pyc,, +chardet/__pycache__/utf8prober.cpython-36.pyc,, +chardet/__pycache__/__init__.cpython-36.pyc,, +chardet/__pycache__/langthaimodel.cpython-36.pyc,, +chardet/__pycache__/euckrfreq.cpython-36.pyc,, +chardet/__pycache__/sbcharsetprober.cpython-36.pyc,, +chardet/__pycache__/jisfreq.cpython-36.pyc,, +chardet/__pycache__/mbcsgroupprober.cpython-36.pyc,, +chardet/__pycache__/jpcntx.cpython-36.pyc,, +chardet/__pycache__/euckrprober.cpython-36.pyc,, +chardet/__pycache__/chardistribution.cpython-36.pyc,, +chardet/__pycache__/langcyrillicmodel.cpython-36.pyc,, +chardet/__pycache__/codingstatemachine.cpython-36.pyc,, +chardet/__pycache__/langhungarianmodel.cpython-36.pyc,, +chardet/__pycache__/langhebrewmodel.cpython-36.pyc,, +chardet/cli/__pycache__/chardetect.cpython-36.pyc,, +chardet/cli/__pycache__/__init__.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 
+Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt new file mode 100644 index 0000000..a884269 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +chardetect = chardet.cli.chardetect:main + diff --git a/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json new file mode 100644 index 0000000..8cdf025 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Text Processing :: Linguistic"], "extensions": {"python.commands": {"wrap_console": {"chardetect": "chardet.cli.chardetect:main"}}, "python.details": {"contacts": [{"email": "dan.blanchard@gmail.com", "name": "Daniel Blanchard", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/chardet/chardet"}}, "python.exports": {"console_scripts": {"chardetect": "chardet.cli.chardetect:main"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["encoding", "i18n", "xml"], "license": "LGPL", "metadata_version": "2.0", "name": "chardet", "summary": "Universal encoding detector for Python 2 and 3", "test_requires": [{"requires": ["hypothesis", "pytest"]}], "version": "3.0.4"} \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt new file mode 100644 index 0000000..79236f2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt @@ -0,0 +1 @@ +chardet diff --git a/py3/lib/python3.6/site-packages/chardet/__init__.py b/py3/lib/python3.6/site-packages/chardet/__init__.py new file mode 100644 index 0000000..0f9f820 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/__init__.py @@ -0,0 +1,39 @@ +######################## BEGIN LICENSE BLOCK ######################## +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +from .compat import PY2, PY3 +from .universaldetector import UniversalDetector +from .version import __version__, VERSION + + +def detect(byte_str): + """ + Detect the encoding of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError('Expected object of type bytes or bytearray, got: ' + '{0}'.format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + detector = UniversalDetector() + detector.feed(byte_str) + return detector.close() diff --git a/py3/lib/python3.6/site-packages/chardet/big5freq.py b/py3/lib/python3.6/site-packages/chardet/big5freq.py new file mode 100644 index 0000000..38f3251 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/big5freq.py @@ -0,0 +1,386 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Big5 frequency table +# by Taiwan's Mandarin Promotion Council +# +# +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +#Char to FreqOrder table +BIG5_TABLE_SIZE = 5376 + +BIG5_CHAR_TO_FREQ_ORDER = ( + 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 +3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 +1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 + 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 +3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 +4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 +5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 + 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 + 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 + 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 +2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 +1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 +3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 + 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 +3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 +2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 + 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 +3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 +1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 +5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 + 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 +5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 +1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 + 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 + 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 +3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 +3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 + 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 +2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 +2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 + 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 + 287,1577,2116, 
768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 +3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 +1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 +1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 +1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 +2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 + 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 +4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 +1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 +5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 +2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 + 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 + 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 + 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 + 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 +5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 + 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 +1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 + 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 + 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 +5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 +1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 + 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 +3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 +4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 +3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 + 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 + 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 +1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 +4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 +3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 +3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 +2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 +5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 +3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 +5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 +1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 +2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 +1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 + 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 +1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 
882,4554,3995,2759,3470, # 1168 +4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 +3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 + 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 + 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 + 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 +2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 +5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 +1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 +2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 +1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 +1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 +5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 +5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 +5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 +3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 +4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 +4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 +2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 +5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 +3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 + 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 +5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 +5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 +1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 +2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 +3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 +4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 +5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 +3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 +4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 +1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 +1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 +4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 +1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 + 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 +1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 +1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 +3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 + 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 +5203,5204,1958,1767,2956,3365,3712,1174, 
452,1477,4594,3366,3155,5205,2838,1253, # 1808 +2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 +1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 +1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 +5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 + 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 +4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 + 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 +2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 + 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 +1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 +1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 + 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 +4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 +4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 +1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 +3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 +5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 +5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 +1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 +2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 +1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 +3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 +2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 +3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 +2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 +4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 +4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 +3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 + 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 +3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 + 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 +3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 +4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 +3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 +1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 +5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 + 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 +5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 +1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 + 391, 
498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 +4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 +4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 + 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 +2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 +2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 +3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 +1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 +4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 +2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 +1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 +1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 +2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 +3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 +1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 +5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 +1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 +4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 +1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 + 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 +1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 +4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 +4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 +2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 +1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 +4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 + 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 +5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 +2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 +3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 +4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 + 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 +5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 +5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 +1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 +4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 +4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 +2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 +3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 +3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 
+2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 +1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 +4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 +3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 +3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 +2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 +4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 +5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 +3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 +2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 +3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 +1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 +2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 +3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 +4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 +2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 +2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 +5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 +1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 +2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 +1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 +3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 +4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 +2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 +3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 +3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 +2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 +4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 +2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 +3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 +4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 +5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 +3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 + 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 +1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 +4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 +1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 +4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 +5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 + 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 
+5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 +5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 +2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 +3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 +2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 +2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 + 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 +1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 +4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 +3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 +3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 + 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 +2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 + 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 +2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 +4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 +1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 +4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 +1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 +3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 + 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 +3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 +5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 +5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 +3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 +3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 +1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 +2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 +5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 +1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 +1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 +3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 + 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 +1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 +4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 +5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 +2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 +3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 + 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 +1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 
4352 +2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 +2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 +5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 +5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 +5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 +2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 +2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 +1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 +4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 +3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 +3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 +4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 +4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 +2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 +2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 +5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 +4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 +5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 +4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 + 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 + 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 +1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 +3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 +4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 +1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 +5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 +2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 +2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 +3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 +5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 +1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 +3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 +5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 +1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 +5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 +2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 +3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 +2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 +3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 +3932,1988, 618, 
427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 +3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 +4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 + 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 +2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 +4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 +3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 +5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 +1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 +5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 + 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 +1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 + 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 +4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 +1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 +4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 +1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 + 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 +3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 +4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 +5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 + 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 +3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 + 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 +2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 +) + diff --git a/py3/lib/python3.6/site-packages/chardet/big5prober.py b/py3/lib/python3.6/site-packages/chardet/big5prober.py new file mode 100644 index 0000000..98f9970 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/big5prober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import Big5DistributionAnalysis +from .mbcssm import BIG5_SM_MODEL + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self): + super(Big5Prober, self).__init__() + self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) + self.distribution_analyzer = Big5DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "Big5" + + @property + def language(self): + return "Chinese" diff --git a/py3/lib/python3.6/site-packages/chardet/chardistribution.py b/py3/lib/python3.6/site-packages/chardet/chardistribution.py new file mode 100644 index 0000000..c0395f4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/chardistribution.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, + EUCTW_TYPICAL_DISTRIBUTION_RATIO) +from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, + EUCKR_TYPICAL_DISTRIBUTION_RATIO) +from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, + GB2312_TYPICAL_DISTRIBUTION_RATIO) +from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, + BIG5_TYPICAL_DISTRIBUTION_RATIO) +from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, + JIS_TYPICAL_DISTRIBUTION_RATIO) + + +class CharDistributionAnalysis(object): + ENOUGH_DATA_THRESHOLD = 1024 + SURE_YES = 0.99 + SURE_NO = 0.01 + MINIMUM_DATA_THRESHOLD = 3 + + def __init__(self): + # Mapping table to get frequency order from char order (get from + # GetOrder()) + self._char_to_freq_order = None + self._table_size = None # Size of above table + # This is a constant value which varies from language to language, + # used in calculating confidence. See + # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html + # for further detail. 
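The "typical distribution ratio" mentioned in the comment above is the divisor used by get_confidence() further down. A toy illustration of that formula with made-up numbers (the real per-encoding ratios live in the *freq.py modules, not here):

    # Made-up counts; the real typical ratios are defined per encoding in the *freq.py tables.
    freq_chars, total_chars, typical_ratio = 800, 1000, 0.75
    confidence = freq_chars / ((total_chars - freq_chars) * typical_ratio)
    print(min(confidence, 0.99))   # capped at SURE_YES, exactly as get_confidence() caps it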
+ self.typical_distribution_ratio = None + self._done = None + self._total_chars = None + self._freq_chars = None + self.reset() + + def reset(self): + """reset analyser, clear any state""" + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + self._total_chars = 0 # Total characters encountered + # The number of characters whose frequency order is less than 512 + self._freq_chars = 0 + + def feed(self, char, char_len): + """feed a character with known length""" + if char_len == 2: + # we only care about 2-bytes character in our distribution analysis + order = self.get_order(char) + else: + order = -1 + if order >= 0: + self._total_chars += 1 + # order is valid + if order < self._table_size: + if 512 > self._char_to_freq_order[order]: + self._freq_chars += 1 + + def get_confidence(self): + """return confidence based on existing data""" + # if we didn't receive any character in our consideration range, + # return negative answer + if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: + return self.SURE_NO + + if self._total_chars != self._freq_chars: + r = (self._freq_chars / ((self._total_chars - self._freq_chars) + * self.typical_distribution_ratio)) + if r < self.SURE_YES: + return r + + # normalize confidence (we don't want to be 100% sure) + return self.SURE_YES + + def got_enough_data(self): + # It is not necessary to receive all data to draw conclusion. + # For charset detection, certain amount of data is enough + return self._total_chars > self.ENOUGH_DATA_THRESHOLD + + def get_order(self, byte_str): + # We do not handle characters based on the original encoding string, + # but convert this encoding string to a number, here called order. + # This allows multiple encodings of a language to share one frequency + # table. + return -1 + + +class EUCTWDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCTWDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER + self._table_size = EUCTW_TABLE_SIZE + self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-TW encoding, we are interested + # first byte range: 0xc4 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xC4: + return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 + else: + return -1 + + +class EUCKRDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCKRDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER + self._table_size = EUCKR_TABLE_SIZE + self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-KR encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. 
State machine has done that + first_char = byte_str[0] + if first_char >= 0xB0: + return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 + else: + return -1 + + +class GB2312DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(GB2312DistributionAnalysis, self).__init__() + self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER + self._table_size = GB2312_TABLE_SIZE + self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for GB2312 encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0xB0) and (second_char >= 0xA1): + return 94 * (first_char - 0xB0) + second_char - 0xA1 + else: + return -1 + + +class Big5DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(Big5DistributionAnalysis, self).__init__() + self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER + self._table_size = BIG5_TABLE_SIZE + self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for big5 encoding, we are interested + # first byte range: 0xa4 -- 0xfe + # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if first_char >= 0xA4: + if second_char >= 0xA1: + return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 + else: + return 157 * (first_char - 0xA4) + second_char - 0x40 + else: + return -1 + + +class SJISDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(SJISDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for sjis encoding, we are interested + # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe + # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0x81) and (first_char <= 0x9F): + order = 188 * (first_char - 0x81) + elif (first_char >= 0xE0) and (first_char <= 0xEF): + order = 188 * (first_char - 0xE0 + 31) + else: + return -1 + order = order + second_char - 0x40 + if second_char > 0x7F: + order = -1 + return order + + +class EUCJPDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCJPDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-JP encoding, we are interested + # first byte range: 0xa0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + char = byte_str[0] + if char >= 0xA0: + return 94 * (char - 0xA1) + byte_str[1] - 0xa1 + else: + return -1 diff --git a/py3/lib/python3.6/site-packages/chardet/charsetgroupprober.py b/py3/lib/python3.6/site-packages/chardet/charsetgroupprober.py new file mode 100644 index 0000000..8b3738e --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/charsetgroupprober.py @@ -0,0 +1,106 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
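A worked example of the get_order() arithmetic shown above, using the EUC-KR case (lead bytes 0xB0-0xFE, trail bytes 0xA1-0xFE, 94 cells per row):

    byte_str = "한".encode("euc-kr")            # b'\xc7\xd1'
    first_char, second_char = byte_str[0], byte_str[1]
    order = 94 * (first_char - 0xB0) + second_char - 0xA1
    print(order)                                # 2210 -> index into EUCKR_CHAR_TO_FREQ_ORDER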
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState +from .charsetprober import CharSetProber + + +class CharSetGroupProber(CharSetProber): + def __init__(self, lang_filter=None): + super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) + self._active_num = 0 + self.probers = [] + self._best_guess_prober = None + + def reset(self): + super(CharSetGroupProber, self).reset() + self._active_num = 0 + for prober in self.probers: + if prober: + prober.reset() + prober.active = True + self._active_num += 1 + self._best_guess_prober = None + + @property + def charset_name(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.charset_name + + @property + def language(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.language + + def feed(self, byte_str): + for prober in self.probers: + if not prober: + continue + if not prober.active: + continue + state = prober.feed(byte_str) + if not state: + continue + if state == ProbingState.FOUND_IT: + self._best_guess_prober = prober + return self.state + elif state == ProbingState.NOT_ME: + prober.active = False + self._active_num -= 1 + if self._active_num <= 0: + self._state = ProbingState.NOT_ME + return self.state + return self.state + + def get_confidence(self): + state = self.state + if state == ProbingState.FOUND_IT: + return 0.99 + elif state == ProbingState.NOT_ME: + return 0.01 + best_conf = 0.0 + self._best_guess_prober = None + for prober in self.probers: + if not prober: + continue + if not prober.active: + self.logger.debug('%s not active', prober.charset_name) + continue + conf = prober.get_confidence() + self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) + if best_conf < conf: + best_conf = conf + self._best_guess_prober = prober + if not self._best_guess_prober: + return 0.0 + return best_conf diff --git a/py3/lib/python3.6/site-packages/chardet/charsetprober.py b/py3/lib/python3.6/site-packages/chardet/charsetprober.py new file mode 100644 index 0000000..eac4e59 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/charsetprober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. 
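CharSetGroupProber above simply fans the input out to its child probers and keeps the most confident one. A minimal hand-wired sketch using two probers from this same package (normally the library builds these groups for you):

    from chardet.charsetgroupprober import CharSetGroupProber
    from chardet.big5prober import Big5Prober
    from chardet.euckrprober import EUCKRProber

    group = CharSetGroupProber()
    group.probers = [Big5Prober(), EUCKRProber()]
    group.reset()                               # marks every child prober active
    group.feed("인코딩 감지 예제".encode("euc-kr"))
    print(group.charset_name, group.get_confidence())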
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging +import re + +from .enums import ProbingState + + +class CharSetProber(object): + + SHORTCUT_THRESHOLD = 0.95 + + def __init__(self, lang_filter=None): + self._state = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + + def reset(self): + self._state = ProbingState.DETECTING + + @property + def charset_name(self): + return None + + def feed(self, buf): + pass + + @property + def state(self): + return self._state + + def get_confidence(self): + return 0.0 + + @staticmethod + def filter_high_byte_only(buf): + buf = re.sub(b'([\x00-\x7F])+', b' ', buf) + return buf + + @staticmethod + def filter_international_words(buf): + """ + We define three types of bytes: + alphabet: english alphabets [a-zA-Z] + international: international characters [\x80-\xFF] + marker: everything else [^a-zA-Z\x80-\xFF] + + The input buffer can be thought to contain a series of words delimited + by markers. This function works to filter all words that contain at + least one international character. All contiguous sequences of markers + are replaced by a single space ascii character. + + This filter applies to all scripts which do not use English characters. + """ + filtered = bytearray() + + # This regex expression filters out only words that have at-least one + # international character. The word may include one marker character at + # the end. + words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', + buf) + + for word in words: + filtered.extend(word[:-1]) + + # If the last character in the word is a marker, replace it with a + # space as markers shouldn't affect our analysis (they are used + # similarly across all languages and may thus have similar + # frequencies). + last_char = word[-1:] + if not last_char.isalpha() and last_char < b'\x80': + last_char = b' ' + filtered.extend(last_char) + + return filtered + + @staticmethod + def filter_with_english_letters(buf): + """ + Returns a copy of ``buf`` that retains only the sequences of English + alphabet and high byte characters that are not between <> characters. + Also retains English alphabet and high byte characters immediately + before occurrences of >. + + This filter can be applied to all scripts which contain both English + characters and extended ASCII characters, but is currently only used by + ``Latin1Prober``. 
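A quick demonstration of two of the byte filters defined above (both are static methods, so no prober instance is needed):

    from chardet.charsetprober import CharSetProber

    buf = "café crème".encode("latin-1")
    print(CharSetProber.filter_high_byte_only(buf))
    # runs of ASCII collapse to single spaces; only the high bytes 0xE9 and 0xE8 remain
    print(CharSetProber.filter_international_words(buf))
    # keeps only words containing at least one byte >= 0x80; trailing markers become spaces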
+ """ + filtered = bytearray() + in_tag = False + prev = 0 + + for curr in range(len(buf)): + # Slice here to get bytes instead of an int with Python 3 + buf_char = buf[curr:curr + 1] + # Check if we're coming out of or entering an HTML tag + if buf_char == b'>': + in_tag = False + elif buf_char == b'<': + in_tag = True + + # If current character is not extended-ASCII and not alphabetic... + if buf_char < b'\x80' and not buf_char.isalpha(): + # ...and we're not in a tag + if curr > prev and not in_tag: + # Keep everything after last non-extended-ASCII, + # non-alphabetic character + filtered.extend(buf[prev:curr]) + # Output a space to delimit stretch we kept + filtered.extend(b' ') + prev = curr + 1 + + # If we're not in a tag... + if not in_tag: + # Keep everything after last non-extended-ASCII, non-alphabetic + # character + filtered.extend(buf[prev:]) + + return filtered diff --git a/py3/lib/python3.6/site-packages/chardet/cli/__init__.py b/py3/lib/python3.6/site-packages/chardet/cli/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/cli/__init__.py @@ -0,0 +1 @@ + diff --git a/py3/lib/python3.6/site-packages/chardet/cli/chardetect.py b/py3/lib/python3.6/site-packages/chardet/cli/chardetect.py new file mode 100644 index 0000000..f0a4cc5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/cli/chardetect.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +""" +Script which takes one or more file paths and reports on their detected +encodings + +Example:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +If no paths are provided, it takes its input from stdin. + +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import argparse +import sys + +from chardet import __version__ +from chardet.compat import PY2 +from chardet.universaldetector import UniversalDetector + + +def description_of(lines, name='stdin'): + """ + Return a string describing the probable encoding of a file or + list of strings. + + :param lines: The lines to get the encoding of. + :type lines: Iterable of bytes + :param name: Name of file or collection of lines + :type name: str + """ + u = UniversalDetector() + for line in lines: + line = bytearray(line) + u.feed(line) + # shortcut out of the loop to save reading further - particularly useful if we read a BOM. + if u.done: + break + u.close() + result = u.result + if PY2: + name = name.decode(sys.getfilesystemencoding(), 'ignore') + if result['encoding']: + return '{0}: {1} with confidence {2}'.format(name, result['encoding'], + result['confidence']) + else: + return '{0}: no result'.format(name) + + +def main(argv=None): + """ + Handles command line arguments and gets things started. + + :param argv: List of arguments, as if specified on the command-line. + If None, ``sys.argv[1:]`` is used instead. + :type argv: list of str + """ + # Get command line arguments + parser = argparse.ArgumentParser( + description="Takes one or more file paths and reports their detected \ + encodings") + parser.add_argument('input', + help='File whose encoding we would like to determine. 
\ + (default: stdin)', + type=argparse.FileType('rb'), nargs='*', + default=[sys.stdin if PY2 else sys.stdin.buffer]) + parser.add_argument('--version', action='version', + version='%(prog)s {0}'.format(__version__)) + args = parser.parse_args(argv) + + for f in args.input: + if f.isatty(): + print("You are running chardetect interactively. Press " + + "CTRL-D twice at the start of a blank line to signal the " + + "end of your input. If you want help, run chardetect " + + "--help\n", file=sys.stderr) + print(description_of(f, f.name)) + + +if __name__ == '__main__': + main() diff --git a/py3/lib/python3.6/site-packages/chardet/codingstatemachine.py b/py3/lib/python3.6/site-packages/chardet/codingstatemachine.py new file mode 100644 index 0000000..68fba44 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/codingstatemachine.py @@ -0,0 +1,88 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging + +from .enums import MachineState + + +class CodingStateMachine(object): + """ + A state machine to verify a byte sequence for a particular encoding. For + each byte the detector receives, it will feed that byte to every active + state machine available, one byte at a time. The state machine changes its + state based on its previous state and the byte it receives. There are 3 + states in a state machine that are of interest to an auto-detector: + + START state: This is the state to start with, or a legal byte sequence + (i.e. a valid code point) for character has been identified. + + ME state: This indicates that the state machine identified a byte sequence + that is specific to the charset it is designed for and that + there is no other possible encoding which can contain this byte + sequence. This will to lead to an immediate positive answer for + the detector. + + ERROR state: This indicates the state machine identified an illegal byte + sequence for that encoding. This will lead to an immediate + negative answer for this encoding. Detector will exclude this + encoding from consideration from here on. 
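The description_of() helper in chardetect.py above is essentially the incremental form of the library's public API; the same loop can be written directly against UniversalDetector (the file name below is only a placeholder):

    from chardet.universaldetector import UniversalDetector

    detector = UniversalDetector()
    with open("somefile.txt", "rb") as handle:   # placeholder path
        for line in handle:
            detector.feed(line)
            if detector.done:                    # e.g. a BOM settled it early
                break
    detector.close()
    print(detector.result)   # dict with 'encoding' and 'confidence' keys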
+ """ + def __init__(self, sm): + self._model = sm + self._curr_byte_pos = 0 + self._curr_char_len = 0 + self._curr_state = None + self.logger = logging.getLogger(__name__) + self.reset() + + def reset(self): + self._curr_state = MachineState.START + + def next_state(self, c): + # for each byte we get its class + # if it is first byte, we also get byte length + byte_class = self._model['class_table'][c] + if self._curr_state == MachineState.START: + self._curr_byte_pos = 0 + self._curr_char_len = self._model['char_len_table'][byte_class] + # from byte's class and state_table, we get its next state + curr_state = (self._curr_state * self._model['class_factor'] + + byte_class) + self._curr_state = self._model['state_table'][curr_state] + self._curr_byte_pos += 1 + return self._curr_state + + def get_current_charlen(self): + return self._curr_char_len + + def get_coding_state_machine(self): + return self._model['name'] + + @property + def language(self): + return self._model['language'] diff --git a/py3/lib/python3.6/site-packages/chardet/compat.py b/py3/lib/python3.6/site-packages/chardet/compat.py new file mode 100644 index 0000000..ddd7468 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/compat.py @@ -0,0 +1,34 @@ +######################## BEGIN LICENSE BLOCK ######################## +# Contributor(s): +# Dan Blanchard +# Ian Cordasco +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys + + +if sys.version_info < (3, 0): + PY2 = True + PY3 = False + base_str = (str, unicode) + text_type = unicode +else: + PY2 = False + PY3 = True + base_str = (bytes, str) + text_type = str diff --git a/py3/lib/python3.6/site-packages/chardet/cp949prober.py b/py3/lib/python3.6/site-packages/chardet/cp949prober.py new file mode 100644 index 0000000..efd793a --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/cp949prober.py @@ -0,0 +1,49 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCKRDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import CP949_SM_MODEL + + +class CP949Prober(MultiByteCharSetProber): + def __init__(self): + super(CP949Prober, self).__init__() + self.coding_sm = CodingStateMachine(CP949_SM_MODEL) + # NOTE: CP949 is a superset of EUC-KR, so the distribution should be + # not different. + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "CP949" + + @property + def language(self): + return "Korean" diff --git a/py3/lib/python3.6/site-packages/chardet/enums.py b/py3/lib/python3.6/site-packages/chardet/enums.py new file mode 100644 index 0000000..0451207 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/enums.py @@ -0,0 +1,76 @@ +""" +All of the Enums that are used throughout the chardet package. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + + +class InputState(object): + """ + This enum represents the different states a universal detector can be in. + """ + PURE_ASCII = 0 + ESC_ASCII = 1 + HIGH_BYTE = 2 + + +class LanguageFilter(object): + """ + This enum represents the different language filters we can apply to a + ``UniversalDetector``. + """ + CHINESE_SIMPLIFIED = 0x01 + CHINESE_TRADITIONAL = 0x02 + JAPANESE = 0x04 + KOREAN = 0x08 + NON_CJK = 0x10 + ALL = 0x1F + CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL + CJK = CHINESE | JAPANESE | KOREAN + + +class ProbingState(object): + """ + This enum represents the different states a prober can be in. + """ + DETECTING = 0 + FOUND_IT = 1 + NOT_ME = 2 + + +class MachineState(object): + """ + This enum represents the different states a state machine can be in. + """ + START = 0 + ERROR = 1 + ITS_ME = 2 + + +class SequenceLikelihood(object): + """ + This enum represents the likelihood of a character following the previous one. + """ + NEGATIVE = 0 + UNLIKELY = 1 + LIKELY = 2 + POSITIVE = 3 + + @classmethod + def get_num_categories(cls): + """:returns: The number of likelihood categories in the enum.""" + return 4 + + +class CharacterCategory(object): + """ + This enum represents the different categories language models for + ``SingleByteCharsetProber`` put characters into. + + Anything less than CONTROL is considered a letter. + """ + UNDEFINED = 255 + LINE_BREAK = 254 + SYMBOL = 253 + DIGIT = 252 + CONTROL = 251 diff --git a/py3/lib/python3.6/site-packages/chardet/escprober.py b/py3/lib/python3.6/site-packages/chardet/escprober.py new file mode 100644 index 0000000..c70493f --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/escprober.py @@ -0,0 +1,101 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
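The LanguageFilter values above are bit flags; probers test them with bitwise AND, as EscCharSetProber does just below. For instance:

    from chardet.enums import LanguageFilter

    wanted = LanguageFilter.JAPANESE | LanguageFilter.KOREAN
    print(bool(wanted & LanguageFilter.JAPANESE))   # True  -> Japanese machines stay enabled
    print(bool(wanted & LanguageFilter.CHINESE))    # False -> both Chinese variants filtered out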
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, ProbingState, MachineState +from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, + ISO2022KR_SM_MODEL) + + +class EscCharSetProber(CharSetProber): + """ + This CharSetProber uses a "code scheme" approach for detecting encodings, + whereby easily recognizable escape or shift sequences are relied on to + identify these encodings. + """ + + def __init__(self, lang_filter=None): + super(EscCharSetProber, self).__init__(lang_filter=lang_filter) + self.coding_sm = [] + if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: + self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) + self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) + if self.lang_filter & LanguageFilter.JAPANESE: + self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) + if self.lang_filter & LanguageFilter.KOREAN: + self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) + self.active_sm_count = None + self._detected_charset = None + self._detected_language = None + self._state = None + self.reset() + + def reset(self): + super(EscCharSetProber, self).reset() + for coding_sm in self.coding_sm: + if not coding_sm: + continue + coding_sm.active = True + coding_sm.reset() + self.active_sm_count = len(self.coding_sm) + self._detected_charset = None + self._detected_language = None + + @property + def charset_name(self): + return self._detected_charset + + @property + def language(self): + return self._detected_language + + def get_confidence(self): + if self._detected_charset: + return 0.99 + else: + return 0.00 + + def feed(self, byte_str): + for c in byte_str: + for coding_sm in self.coding_sm: + if not coding_sm or not coding_sm.active: + continue + coding_state = coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + coding_sm.active = False + self.active_sm_count -= 1 + if self.active_sm_count <= 0: + self._state = ProbingState.NOT_ME + return self.state + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + self._detected_charset = coding_sm.get_coding_state_machine() + self._detected_language = coding_sm.language + return self.state + + return self.state diff --git a/py3/lib/python3.6/site-packages/chardet/escsm.py b/py3/lib/python3.6/site-packages/chardet/escsm.py new file mode 100644 index 0000000..0069523 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/escsm.py @@ -0,0 +1,246 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. 
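Because the escape sequences it looks for are unambiguous, EscCharSetProber can reach a verdict from only a few bytes. A small sketch; ESC $ ) C is the ISO-2022-KR designation sequence:

    from chardet.enums import LanguageFilter, ProbingState
    from chardet.escprober import EscCharSetProber

    prober = EscCharSetProber(lang_filter=LanguageFilter.ALL)
    state = prober.feed(b'\x1b$)C')               # ESC $ ) C
    print(state == ProbingState.FOUND_IT, prober.charset_name)   # should print: True ISO-2022-KR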
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +HZ_CLS = ( +1,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,0,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,4,0,5,2,0, # 78 - 7f +1,1,1,1,1,1,1,1, # 80 - 87 +1,1,1,1,1,1,1,1, # 88 - 8f +1,1,1,1,1,1,1,1, # 90 - 97 +1,1,1,1,1,1,1,1, # 98 - 9f +1,1,1,1,1,1,1,1, # a0 - a7 +1,1,1,1,1,1,1,1, # a8 - af +1,1,1,1,1,1,1,1, # b0 - b7 +1,1,1,1,1,1,1,1, # b8 - bf +1,1,1,1,1,1,1,1, # c0 - c7 +1,1,1,1,1,1,1,1, # c8 - cf +1,1,1,1,1,1,1,1, # d0 - d7 +1,1,1,1,1,1,1,1, # d8 - df +1,1,1,1,1,1,1,1, # e0 - e7 +1,1,1,1,1,1,1,1, # e8 - ef +1,1,1,1,1,1,1,1, # f0 - f7 +1,1,1,1,1,1,1,1, # f8 - ff +) + +HZ_ST = ( +MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 + 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f + 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 + 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f +) + +HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +HZ_SM_MODEL = {'class_table': HZ_CLS, + 'class_factor': 6, + 'state_table': HZ_ST, + 'char_len_table': HZ_CHAR_LEN_TABLE, + 'name': "HZ-GB-2312", + 'language': 'Chinese'} + +ISO2022CN_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,3,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,4,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f 
+2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022CN_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 + 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f +) + +ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, + 'class_factor': 9, + 'state_table': ISO2022CN_ST, + 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, + 'name': "ISO-2022-CN", + 'language': 'Chinese'} + +ISO2022JP_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,2,2, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,7,0,0,0, # 20 - 27 +3,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +6,0,4,0,8,0,0,0, # 40 - 47 +0,9,5,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022JP_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR, 
5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 +) + +ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, + 'class_factor': 10, + 'state_table': ISO2022JP_ST, + 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, + 'name': "ISO-2022-JP", + 'language': 'Japanese'} + +ISO2022KR_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,3,0,0,0, # 20 - 27 +0,4,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,5,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022KR_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 +) + +ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, + 'class_factor': 6, + 'state_table': ISO2022KR_ST, + 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, + 'name': "ISO-2022-KR", + 'language': 'Korean'} + + diff --git a/py3/lib/python3.6/site-packages/chardet/eucjpprober.py b/py3/lib/python3.6/site-packages/chardet/eucjpprober.py new file mode 100644 index 0000000..20ce8f7 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/eucjpprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
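The *_SM_MODEL dictionaries above are plain data; CodingStateMachine (defined earlier in this diff) just walks them one byte at a time. Stepping the ISO-2022-JP model through its own designation sequence shows the decisive ITS_ME transition:

    from chardet.codingstatemachine import CodingStateMachine
    from chardet.escsm import ISO2022JP_SM_MODEL
    from chardet.enums import MachineState

    sm = CodingStateMachine(ISO2022JP_SM_MODEL)
    for byte in b'\x1b$B':                      # ESC $ B designates JIS X 0208
        state = sm.next_state(byte)
    print(state == MachineState.ITS_ME)         # True: the model treats this escape as decisive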
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState, MachineState +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCJPDistributionAnalysis +from .jpcntx import EUCJPContextAnalysis +from .mbcssm import EUCJP_SM_MODEL + + +class EUCJPProber(MultiByteCharSetProber): + def __init__(self): + super(EUCJPProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) + self.distribution_analyzer = EUCJPDistributionAnalysis() + self.context_analyzer = EUCJPContextAnalysis() + self.reset() + + def reset(self): + super(EUCJPProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return "EUC-JP" + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char, char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/py3/lib/python3.6/site-packages/chardet/euckrfreq.py b/py3/lib/python3.6/site-packages/chardet/euckrfreq.py new file mode 100644 index 0000000..b68078c --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/euckrfreq.py @@ -0,0 +1,195 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
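EUCJPProber above runs a context analyzer and a distribution analyzer side by side and reports whichever is more confident (the max() in its get_confidence()). A brief usage sketch:

    from chardet.eucjpprober import EUCJPProber

    prober = EUCJPProber()
    prober.feed("文字コード判定の例です。".encode("euc-jp"))
    # Confidence is the larger of the context-analysis and distribution-analysis scores.
    print(prober.charset_name, prober.get_confidence())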
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology + +# 128 --> 0.79 +# 256 --> 0.92 +# 512 --> 0.986 +# 1024 --> 0.99944 +# 2048 --> 0.99999 +# +# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 +# Random Distribution Ration = 512 / (2350-512) = 0.279. +# +# Typical Distribution Ratio + +EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 + +EUCKR_TABLE_SIZE = 2352 + +# Char to FreqOrder table , +EUCKR_CHAR_TO_FREQ_ORDER = ( + 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, +1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, +1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, + 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, + 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, + 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, +1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, + 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, + 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, +1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, +1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, +1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, +1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, +1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, + 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, +1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, +1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, +1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, +1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, + 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, +1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, + 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, + 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, +1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, + 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, +1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, + 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, + 0, 886,1274, 122, 575, 260, 
908, 938,1890,1275, 410, 316,1891,1892, 100,1893, +1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, +1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, +1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, +1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, + 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, +1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, + 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, + 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, +1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, +1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, +1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, +1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, +1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, +1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, + 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, + 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, + 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, +1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, + 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, +1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, + 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, + 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, +2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, + 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, + 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, +2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, +2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, +2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, + 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, + 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, +2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, + 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, +1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, +2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, +1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, +2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, +2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, +1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, + 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, +2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, +2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, + 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, + 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, 
+2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, +1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, +2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, +2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, +2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, +2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, +2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, +2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, +1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, +2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, +2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, +2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, +2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, +2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, +1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, +1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, +2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, +1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, +2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, +1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, + 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, +2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, + 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, +2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, + 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, +2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, +2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, + 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, +2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, +1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, + 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, +1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, +2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, +1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, +2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, + 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, +2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, +1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, +2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, +1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, +2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, +1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, + 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, +2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 
608,2426,2427,2428,2429, 221, +2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, + 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, + 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, +1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, +1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, + 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, +2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, +2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, + 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, + 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, + 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, +2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, + 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, + 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, +2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, +2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, + 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, +2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, +1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, + 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, +2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, +2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, +2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, + 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, + 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, + 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, +2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, +2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, +2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, +1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, +2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, + 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 +) + diff --git a/py3/lib/python3.6/site-packages/chardet/euckrprober.py b/py3/lib/python3.6/site-packages/chardet/euckrprober.py new file mode 100644 index 0000000..345a060 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/euckrprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCKRDistributionAnalysis +from .mbcssm import EUCKR_SM_MODEL + + +class EUCKRProber(MultiByteCharSetProber): + def __init__(self): + super(EUCKRProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-KR" + + @property + def language(self): + return "Korean" diff --git a/py3/lib/python3.6/site-packages/chardet/euctwfreq.py b/py3/lib/python3.6/site-packages/chardet/euctwfreq.py new file mode 100644 index 0000000..ed7a995 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/euctwfreq.py @@ -0,0 +1,387 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# EUCTW frequency table +# Converted from big5 work +# by Taiwan's Mandarin Promotion Council +# + +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +# Char to FreqOrder table , +EUCTW_TABLE_SIZE = 5376 + +EUCTW_CHAR_TO_FREQ_ORDER = ( + 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 +3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 +1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 + 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 +3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 +4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 +7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 + 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 + 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 + 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 +2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 +1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 +3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 + 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 +3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 +2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 + 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 +3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 +1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 +7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 + 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 +7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 +1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 + 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 + 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 +3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 +3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 + 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 +2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 +2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 + 314,2615,2775,4308,2330,2331, 
569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 + 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 +3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 +1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 +1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 +1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 +2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 + 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 +4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 +1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 +7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 +2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 + 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 + 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 + 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 + 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 +7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 + 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 +1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 + 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 + 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 +7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 +1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 + 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 +3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 +4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 +3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 + 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 + 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 +1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 +4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 +3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 +3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 +2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 +7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 +3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 +7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 +1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 +2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 +1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 + 78,3750,3751, 
267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 +1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 +4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 +3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 + 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 + 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 + 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 +2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 +7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 +1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 +2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 +1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 +1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 +7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 +7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 +7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 +3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 +4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 +1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 +7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 +2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 +7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 +3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 +3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 +7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 +2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 +7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 + 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 +4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 +2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 +7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 +3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 +2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 +2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 + 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 +2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 +1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 +1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 +2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 +1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 +7505,3129,3261, 
215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 +7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 +2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 +4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 +1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 +7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 + 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 +4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 + 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 +2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 + 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 +1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 +1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 + 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 +3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 +3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 +1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 +3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 +7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 +7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 +1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 +2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 +1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 +3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 +2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 +3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 +2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 +4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 +4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 +3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 + 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 +3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 + 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 +3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 +3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 +3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 +1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 +7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 + 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 +7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 +1702,1226, 
102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 + 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 +4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 +3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 + 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 +2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 +2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 +3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 +1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 +4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 +2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 +1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 +1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 +2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 +3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 +1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 +7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 +1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 +4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 +1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 + 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 +1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 +3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 +3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 +2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 +1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 +4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 + 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 +7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 +2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 +3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 +4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 + 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 +7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 +7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 +1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 +4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 +3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 +2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 +3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
+3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 +2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 +1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 +4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 +3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 +3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 +2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 +4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 +7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 +3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 +2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 +3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 +1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 +2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 +3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 +4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 +2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 +2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 +7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 +1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 +2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 +1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 +3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 +4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 +2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 +3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 +3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 +2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 +4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 +2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 +3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 +4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 +7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 +3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 + 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 +1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 +4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 +1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 +4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 +7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 + 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 +7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 +2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 +1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 +1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 +3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 + 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 + 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 + 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 +3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 +2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 + 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 +7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 +1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 +3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 +7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 +1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 +7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 +4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 +1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 +2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 +2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 +4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 + 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 + 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 +3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 +3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 +1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 +2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 +7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 +1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 +1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 +3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 + 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 +1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 +4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 +7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 +2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 +3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 + 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 +1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 +2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 +2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 +7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 +7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 +7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 +2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 +2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 +1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 +4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 +3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 +3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 +4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 +4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 +2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 +2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 +7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 +4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 +7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 +2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 +1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 +3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 +4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 +2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 + 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 +2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 +1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 +2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 +2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 +4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 +7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 +1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 +3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 +7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 +1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 +8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 +2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 +8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 +2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 +2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 +8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 +8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 +8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 + 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 +8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 +4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 +3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 +8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 +1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 +8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 + 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 +1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 + 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 +4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 +1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 +4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 +1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 + 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 +3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 +4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 +8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 + 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 +3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 + 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 +2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 +) + diff --git a/py3/lib/python3.6/site-packages/chardet/euctwprober.py b/py3/lib/python3.6/site-packages/chardet/euctwprober.py new file mode 100644 index 0000000..35669cc --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/euctwprober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCTWDistributionAnalysis +from .mbcssm import EUCTW_SM_MODEL + +class EUCTWProber(MultiByteCharSetProber): + def __init__(self): + super(EUCTWProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) + self.distribution_analyzer = EUCTWDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-TW" + + @property + def language(self): + return "Taiwan" diff --git a/py3/lib/python3.6/site-packages/chardet/gb2312freq.py b/py3/lib/python3.6/site-packages/chardet/gb2312freq.py new file mode 100644 index 0000000..697837b --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/gb2312freq.py @@ -0,0 +1,283 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# GB2312 most frequently used character table +# +# Char to FreqOrder table , from hz6763 + +# 512 --> 0.79 -- 0.79 +# 1024 --> 0.92 -- 0.13 +# 2048 --> 0.98 -- 0.06 +# 6768 --> 1.00 -- 0.02 +# +# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 +# Random Distribution Ration = 512 / (3755 - 512) = 0.157 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR + +GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 + +GB2312_TABLE_SIZE = 3760 + +GB2312_CHAR_TO_FREQ_ORDER = ( +1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, +2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, +2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, + 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, +1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, +1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, + 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, +1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, +2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, +3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, + 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, +1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, + 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, +2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, + 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, +2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, +1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, +3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, + 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, +1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, + 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, +2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, +1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, +3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, +1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, +2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, +1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, + 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, +3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, +3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, + 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, +3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, + 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, +1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, +3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, +2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, +1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, + 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, +1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, +4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, + 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, +3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, +3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, + 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, +1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, +2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, +1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, +1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, + 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, +3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, +3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, +4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, + 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, +3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, +1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, +1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, +4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, + 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, + 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, +3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, +1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, + 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, +1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, +2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, + 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, + 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, + 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, +3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, +4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, +3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, + 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, +2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, +2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, +2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, + 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, +2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, + 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, + 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, + 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, +3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, +2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, +2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, +1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, + 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, +2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, + 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, + 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, +1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, +1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, + 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, + 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, +1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, +2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, +3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, +2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, +2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, +2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, +3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, +1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, +1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, +2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, +1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, +3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, +1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, +1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, +3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, + 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, +2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, +1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, +4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, +1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, +1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, +3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, +1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, + 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, + 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, +1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, + 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, +1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, +1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, + 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, +3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, +4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, +3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, +2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, +2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, +1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, +3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, +2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, +1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, +1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, + 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, +2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, +2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, +3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, +4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, +3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, + 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, +3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, +2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, +1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, + 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, + 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, +3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, +4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, +2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, +1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, +1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, + 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, +1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, +3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, + 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, + 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, +1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, + 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, +1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, + 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, +2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, + 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, +2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, +2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, +1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, +1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, +2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, + 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, +1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, +1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, +2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, +2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, +3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, +1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, +4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, + 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, + 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, +3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, +1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, + 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, +3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, +1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, +4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, +1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, +2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, +1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, + 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, +1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, +3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, + 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, +2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, + 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, +1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, +1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, +1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, +3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, +2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, +3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, +3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, +3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, + 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, +2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, + 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, +2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, + 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, +1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, + 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, + 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, +1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, +3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, +3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, +1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, +1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, +3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, +2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, +2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, +1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, +3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, + 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, +4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, +1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, +2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, +3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, +3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, +1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, + 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, + 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, +2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, + 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, +1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, + 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, +1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, +1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, +1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, +1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, +1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, + 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, + 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 +) + diff --git a/py3/lib/python3.6/site-packages/chardet/gb2312prober.py b/py3/lib/python3.6/site-packages/chardet/gb2312prober.py new file mode 100644 index 0000000..8446d2d --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/gb2312prober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import GB2312DistributionAnalysis +from .mbcssm import GB2312_SM_MODEL + +class GB2312Prober(MultiByteCharSetProber): + def __init__(self): + super(GB2312Prober, self).__init__() + self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) + self.distribution_analyzer = GB2312DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "GB2312" + + @property + def language(self): + return "Chinese" diff --git a/py3/lib/python3.6/site-packages/chardet/hebrewprober.py b/py3/lib/python3.6/site-packages/chardet/hebrewprober.py new file mode 100644 index 0000000..b0e1bf4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/hebrewprober.py @@ -0,0 +1,292 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Shy Shalom +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +# This prober doesn't actually recognize a language or a charset. +# It is a helper prober for the use of the Hebrew model probers + +### General ideas of the Hebrew charset recognition ### +# +# Four main charsets exist in Hebrew: +# "ISO-8859-8" - Visual Hebrew +# "windows-1255" - Logical Hebrew +# "ISO-8859-8-I" - Logical Hebrew +# "x-mac-hebrew" - ?? Logical Hebrew ?? +# +# Both "ISO" charsets use a completely identical set of code points, whereas +# "windows-1255" and "x-mac-hebrew" are two different proper supersets of +# these code points. windows-1255 defines additional characters in the range +# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific +# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. +# x-mac-hebrew defines similar additional code points but with a different +# mapping. +# +# As far as an average Hebrew text with no diacritics is concerned, all four +# charsets are identical with respect to code points. Meaning that for the +# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters +# (including final letters). 
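A quick, hedged aside (not part of the vendored chardet sources): the identity of the main Hebrew letter range across these charsets is easy to check with Python's standard-library codecs alone, which is why byte values by themselves cannot separate Visual from Logical Hebrew and the directionality reasoning below is needed.

    # Standalone sketch; uses only built-in codecs (cp1255 is the windows-1255 codec).
    text = "שלום עולם"  # plain Hebrew letters and a space, no diacritics
    assert text.encode("iso-8859-8") == text.encode("cp1255")
    print(text.encode("cp1255").hex())  # f9ece5ed20f2e5eced -- identical bytes either way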
+# +# The dominant difference between these charsets is their directionality. +# "Visual" directionality means that the text is ordered as if the renderer is +# not aware of a BIDI rendering algorithm. The renderer sees the text and +# draws it from left to right. The text itself when ordered naturally is read +# backwards. A buffer of Visual Hebrew generally looks like so: +# "[last word of first line spelled backwards] [whole line ordered backwards +# and spelled backwards] [first word of first line spelled backwards] +# [end of line] [last word of second line] ... etc' " +# adding punctuation marks, numbers and English text to visual text is +# naturally also "visual" and from left to right. +# +# "Logical" directionality means the text is ordered "naturally" according to +# the order it is read. It is the responsibility of the renderer to display +# the text from right to left. A BIDI algorithm is used to place general +# punctuation marks, numbers and English text in the text. +# +# Texts in x-mac-hebrew are almost impossible to find on the Internet. From +# what little evidence I could find, it seems that its general directionality +# is Logical. +# +# To sum up all of the above, the Hebrew probing mechanism knows about two +# charsets: +# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are +# backwards while line order is natural. For charset recognition purposes +# the line order is unimportant (In fact, for this implementation, even +# word order is unimportant). +# Logical Hebrew - "windows-1255" - normal, naturally ordered text. +# +# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be +# specifically identified. +# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew +# that contain special punctuation marks or diacritics is displayed with +# some unconverted characters showing as question marks. This problem might +# be corrected using another model prober for x-mac-hebrew. Due to the fact +# that x-mac-hebrew texts are so rare, writing another model prober isn't +# worth the effort and performance hit. +# +#### The Prober #### +# +# The prober is divided between two SBCharSetProbers and a HebrewProber, +# all of which are managed, created, fed data, inquired and deleted by the +# SBCSGroupProber. The two SBCharSetProbers identify that the text is in +# fact some kind of Hebrew, Logical or Visual. The final decision about which +# one is it is made by the HebrewProber by combining final-letter scores +# with the scores of the two SBCharSetProbers to produce a final answer. +# +# The SBCSGroupProber is responsible for stripping the original text of HTML +# tags, English characters, numbers, low-ASCII punctuation characters, spaces +# and new lines. It reduces any sequence of such characters to a single space. +# The buffer fed to each prober in the SBCS group prober is pure text in +# high-ASCII. +# The two SBCharSetProbers (model probers) share the same language model: +# Win1255Model. +# The first SBCharSetProber uses the model normally as any other +# SBCharSetProber does, to recognize windows-1255, upon which this model was +# built. The second SBCharSetProber is told to make the pair-of-letter +# lookup in the language model backwards. This in practice exactly simulates +# a visual Hebrew model using the windows-1255 logical Hebrew model. +# +# The HebrewProber is not using any language model. All it does is look for +# final-letter evidence suggesting the text is either logical Hebrew or visual +# Hebrew. 
Disjointed from the model probers, the results of the HebrewProber +# alone are meaningless. HebrewProber always returns 0.00 as confidence +# since it never identifies a charset by itself. Instead, the pointer to the +# HebrewProber is passed to the model probers as a helper "Name Prober". +# When the Group prober receives a positive identification from any prober, +# it asks for the name of the charset identified. If the prober queried is a +# Hebrew model prober, the model prober forwards the call to the +# HebrewProber to make the final decision. In the HebrewProber, the +# decision is made according to the final-letters scores maintained and Both +# model probers scores. The answer is returned in the form of the name of the +# charset identified, either "windows-1255" or "ISO-8859-8". + +class HebrewProber(CharSetProber): + # windows-1255 / ISO-8859-8 code points of interest + FINAL_KAF = 0xea + NORMAL_KAF = 0xeb + FINAL_MEM = 0xed + NORMAL_MEM = 0xee + FINAL_NUN = 0xef + NORMAL_NUN = 0xf0 + FINAL_PE = 0xf3 + NORMAL_PE = 0xf4 + FINAL_TSADI = 0xf5 + NORMAL_TSADI = 0xf6 + + # Minimum Visual vs Logical final letter score difference. + # If the difference is below this, don't rely solely on the final letter score + # distance. + MIN_FINAL_CHAR_DISTANCE = 5 + + # Minimum Visual vs Logical model score difference. + # If the difference is below this, don't rely at all on the model score + # distance. + MIN_MODEL_DISTANCE = 0.01 + + VISUAL_HEBREW_NAME = "ISO-8859-8" + LOGICAL_HEBREW_NAME = "windows-1255" + + def __init__(self): + super(HebrewProber, self).__init__() + self._final_char_logical_score = None + self._final_char_visual_score = None + self._prev = None + self._before_prev = None + self._logical_prober = None + self._visual_prober = None + self.reset() + + def reset(self): + self._final_char_logical_score = 0 + self._final_char_visual_score = 0 + # The two last characters seen in the previous buffer, + # mPrev and mBeforePrev are initialized to space in order to simulate + # a word delimiter at the beginning of the data + self._prev = ' ' + self._before_prev = ' ' + # These probers are owned by the group prober. + + def set_model_probers(self, logicalProber, visualProber): + self._logical_prober = logicalProber + self._visual_prober = visualProber + + def is_final(self, c): + return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, + self.FINAL_PE, self.FINAL_TSADI] + + def is_non_final(self, c): + # The normal Tsadi is not a good Non-Final letter due to words like + # 'lechotet' (to chat) containing an apostrophe after the tsadi. This + # apostrophe is converted to a space in FilterWithoutEnglishLetters + # causing the Non-Final tsadi to appear at an end of a word even + # though this is not the case in the original text. + # The letters Pe and Kaf rarely display a related behavior of not being + # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' + # for example legally end with a Non-Final Pe or Kaf. However, the + # benefit of these letters as Non-Final letters outweighs the damage + # since these words are quite rare. + return c in [self.NORMAL_KAF, self.NORMAL_MEM, + self.NORMAL_NUN, self.NORMAL_PE] + + def feed(self, byte_str): + # Final letter analysis for logical-visual decision. + # Look for evidence that the received buffer is either logical Hebrew + # or visual Hebrew. + # The following cases are checked: + # 1) A word longer than 1 letter, ending with a final letter. 
This is + # an indication that the text is laid out "naturally" since the + # final letter really appears at the end. +1 for logical score. + # 2) A word longer than 1 letter, ending with a Non-Final letter. In + # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, + # should not end with the Non-Final form of that letter. Exceptions + # to this rule are mentioned above in isNonFinal(). This is an + # indication that the text is laid out backwards. +1 for visual + # score + # 3) A word longer than 1 letter, starting with a final letter. Final + # letters should not appear at the beginning of a word. This is an + # indication that the text is laid out backwards. +1 for visual + # score. + # + # The visual score and logical score are accumulated throughout the + # text and are finally checked against each other in GetCharSetName(). + # No checking for final letters in the middle of words is done since + # that case is not an indication for either Logical or Visual text. + # + # We automatically filter out all 7-bit characters (replace them with + # spaces) so the word boundary detection works properly. [MAP] + + if self.state == ProbingState.NOT_ME: + # Both model probers say it's not them. No reason to continue. + return ProbingState.NOT_ME + + byte_str = self.filter_high_byte_only(byte_str) + + for cur in byte_str: + if cur == ' ': + # We stand on a space - a word just ended + if self._before_prev != ' ': + # next-to-last char was not a space so self._prev is not a + # 1 letter word + if self.is_final(self._prev): + # case (1) [-2:not space][-1:final letter][cur:space] + self._final_char_logical_score += 1 + elif self.is_non_final(self._prev): + # case (2) [-2:not space][-1:Non-Final letter][ + # cur:space] + self._final_char_visual_score += 1 + else: + # Not standing on a space + if ((self._before_prev == ' ') and + (self.is_final(self._prev)) and (cur != ' ')): + # case (3) [-2:space][-1:final letter][cur:not space] + self._final_char_visual_score += 1 + self._before_prev = self._prev + self._prev = cur + + # Forever detecting, till the end or until both model probers return + # ProbingState.NOT_ME (handled above) + return ProbingState.DETECTING + + @property + def charset_name(self): + # Make the decision: is it Logical or Visual? + # If the final letter score distance is dominant enough, rely on it. + finalsub = self._final_char_logical_score - self._final_char_visual_score + if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # It's not dominant enough, try to rely on the model scores instead. + modelsub = (self._logical_prober.get_confidence() + - self._visual_prober.get_confidence()) + if modelsub > self.MIN_MODEL_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if modelsub < -self.MIN_MODEL_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # Still no good, back to final letter distance, maybe it'll save the + # day. + if finalsub < 0.0: + return self.VISUAL_HEBREW_NAME + + # (finalsub > 0 - Logical) or (don't know what to do) default to + # Logical. + return self.LOGICAL_HEBREW_NAME + + @property + def language(self): + return 'Hebrew' + + @property + def state(self): + # Remain active as long as any of the model probers are active. 
+ if (self._logical_prober.state == ProbingState.NOT_ME) and \ + (self._visual_prober.state == ProbingState.NOT_ME): + return ProbingState.NOT_ME + return ProbingState.DETECTING diff --git a/py3/lib/python3.6/site-packages/chardet/jisfreq.py b/py3/lib/python3.6/site-packages/chardet/jisfreq.py new file mode 100644 index 0000000..83fc082 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/jisfreq.py @@ -0,0 +1,325 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology +# +# Japanese frequency table, applied to both S-JIS and EUC-JP +# They are sorted in order. 
+ +# 128 --> 0.77094 +# 256 --> 0.85710 +# 512 --> 0.92635 +# 1024 --> 0.97130 +# 2048 --> 0.99431 +# +# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 +# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 +# +# Typical Distribution Ratio, 25% of IDR + +JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 + +# Char to FreqOrder table , +JIS_TABLE_SIZE = 4368 + +JIS_CHAR_TO_FREQ_ORDER = ( + 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 +3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 +1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 +2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 +2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 +5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 +1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 +5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 +5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 +5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 +5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 +5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 +5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 +1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 +1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 +1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 +2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 +3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 +3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 + 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 + 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 +1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 + 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 +5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 + 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 + 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 + 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 + 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 + 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 +5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 +5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 +5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 +4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 +5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 +5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 +5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 +5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 
+5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 +5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 +5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 +5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 +5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 +3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 +5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 +5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 +5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 +5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 +5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 +5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 +5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 +5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 +5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 +5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 +5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 +5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 +5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 +5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 +5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 +5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 +5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 +5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 +5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 +5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 +5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 +5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 +5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 +5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 +5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 +5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 +5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 +5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 +5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 +5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 +5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 +5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 +5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 
+5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 +5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 +5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 +5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 +6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 +6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 +6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 +6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 +6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 +6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 +6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 +6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 +4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 + 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 + 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 +1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 +1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 + 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 +3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 +3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 + 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 +3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 +3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 + 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 +2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 + 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 +3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 +1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 + 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 +1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 + 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 +2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 +2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 +2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 +2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 +1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 +1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 +1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 +1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 +2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 
1872 +1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 +2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 +1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 +1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 +1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 +1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 +1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 +1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 + 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 + 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 +1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 +2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 +2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 +2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 +3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 +3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 + 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 +3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 +1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 + 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 +2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 +1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 + 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 +3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 +4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 +2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 +1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 +2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 +1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 + 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 + 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 +1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 +2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 +2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 +2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 +3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 +1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 +2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 + 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 + 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, 
# 2512 + 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 +1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 +2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 + 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 +1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 +1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 + 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 +1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 +1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 +1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 + 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 +2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 + 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 +2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 +3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 +2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 +1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 +6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 +1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 +2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 +1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 + 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 + 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 +3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 +3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 +1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 +1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 +1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 +1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 + 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 + 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 +2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 + 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 +3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 +2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 + 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 +1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 +2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 + 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 +1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, 
# 3152 + 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 +4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 +2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 +1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 + 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 +1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 +2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 + 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 +6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 +1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 +1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 +2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 +3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 + 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 +3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 +1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 + 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 +1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 + 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 +3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 + 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 +2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 + 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 +4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 +2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 +1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 +1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 +1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 + 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 +1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 +3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 +1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 +3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 + 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 + 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 + 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 +2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 +1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 + 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 +1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 + 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 +1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 + 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 + 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 + 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 +1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 +1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 +2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 +4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 + 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 +1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 + 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 +1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 +3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 +1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 +2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 +2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 +1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 +1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 +2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 + 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 +2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 +1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 +1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 +1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 +1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 +3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 +2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 +2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 + 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 +3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 +3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 +1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 +2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 +1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 +2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 +) + + diff --git a/py3/lib/python3.6/site-packages/chardet/jpcntx.py b/py3/lib/python3.6/site-packages/chardet/jpcntx.py new file mode 100644 index 0000000..20044e4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/jpcntx.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK 
######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +# This is hiragana 2-char sequence table, the number in each cell represents its frequency category +jp2CharContext = ( +(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), +(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), +(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), +(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), +(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), +(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), +(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), 
+(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), +(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), +(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), +(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), +(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), +(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), +(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), +(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), +(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), +(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), +(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), +(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), +(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), +(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), +(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), +(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), +(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), +(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), +(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), +(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), 
+(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), +(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), +(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), +(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), +(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), +(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), +(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), +(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), +(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), +(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), +(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), +(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), +(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), +(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), +(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), +(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), +(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), +(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), +(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), +(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), 
+(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), +(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), +(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), +(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), +(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), +(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), +(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), +(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), +(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), +(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), +(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), +(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), +(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), +(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), +(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), +(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), +(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), +(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), +(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), +(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), 
+(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), +(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), +(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), +(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), +(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), +(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), +(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), +) + +class JapaneseContextAnalysis(object): + NUM_OF_CATEGORY = 6 + DONT_KNOW = -1 + ENOUGH_REL_THRESHOLD = 100 + MAX_REL_THRESHOLD = 1000 + MINIMUM_DATA_THRESHOLD = 4 + + def __init__(self): + self._total_rel = None + self._rel_sample = None + self._need_to_skip_char_num = None + self._last_char_order = None + self._done = None + self.reset() + + def reset(self): + self._total_rel = 0 # total sequence received + # category counters, each integer counts sequence in its category + self._rel_sample = [0] * self.NUM_OF_CATEGORY + # if last byte in current buffer is not the last byte of a character, + # we need to know how many bytes to skip in next buffer + self._need_to_skip_char_num = 0 + self._last_char_order = -1 # The order of previous char + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + + def feed(self, byte_str, num_bytes): + if self._done: + return + + # The buffer we got is byte oriented, and a character may span in more than one + # buffers. In case the last one or two byte in last buffer is not + # complete, we record how many byte needed to complete that character + # and skip these bytes here. We can choose to record those bytes as + # well and analyse the character once it is complete, but since a + # character will not make much difference, by simply skipping + # this character will simply our logic and improve performance. 
+ i = self._need_to_skip_char_num + while i < num_bytes: + order, char_len = self.get_order(byte_str[i:i + 2]) + i += char_len + if i > num_bytes: + self._need_to_skip_char_num = i - num_bytes + self._last_char_order = -1 + else: + if (order != -1) and (self._last_char_order != -1): + self._total_rel += 1 + if self._total_rel > self.MAX_REL_THRESHOLD: + self._done = True + break + self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 + self._last_char_order = order + + def got_enough_data(self): + return self._total_rel > self.ENOUGH_REL_THRESHOLD + + def get_confidence(self): + # This is just one way to calculate confidence. It works well for me. + if self._total_rel > self.MINIMUM_DATA_THRESHOLD: + return (self._total_rel - self._rel_sample[0]) / self._total_rel + else: + return self.DONT_KNOW + + def get_order(self, byte_str): + return -1, 1 + +class SJISContextAnalysis(JapaneseContextAnalysis): + def __init__(self): + super(SJISContextAnalysis, self).__init__() + self._charset_name = "SHIFT_JIS" + + @property + def charset_name(self): + return self._charset_name + + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): + char_len = 2 + if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): + self._charset_name = "CP932" + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 202) and (0x9F <= second_char <= 0xF1): + return second_char - 0x9F, char_len + + return -1, char_len + +class EUCJPContextAnalysis(JapaneseContextAnalysis): + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): + char_len = 2 + elif first_char == 0x8F: + char_len = 3 + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): + return second_char - 0xA1, char_len + + return -1, char_len + + diff --git a/py3/lib/python3.6/site-packages/chardet/langbulgarianmodel.py b/py3/lib/python3.6/site-packages/chardet/langbulgarianmodel.py new file mode 100644 index 0000000..2aa4fb2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/langbulgarianmodel.py @@ -0,0 +1,228 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +# this table is modified base on win1251BulgarianCharToOrderMap, so +# only number <64 is sure valid + +Latin5_BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 +210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 + 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 + 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 +) + +win1251BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 +221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 + 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 + 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 96.9392% +# first 1024 sequences:3.0618% +# rest sequences: 0.2992% +# negative sequences: 0.0020% +BulgarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, +3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, +0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, +0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
+3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, +0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, +0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, +0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, +2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, +3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, +1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, +3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, +1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, +2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, +2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, +3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, +1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, +2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, +2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, +1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, +2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, +2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, +2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, +1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, +2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, +1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, +3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, +1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, +3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, +1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, +2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, +1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, +2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, +1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, +2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, +1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, +2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, +1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, +0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, +1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, +1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, +1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, +0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, +0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
+2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, +1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, +1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +) + +Latin5BulgarianModel = { + 'char_to_order_map': Latin5_BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Bulgairan', +} + +Win1251BulgarianModel = { + 'char_to_order_map': win1251BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Bulgarian', +} diff --git a/py3/lib/python3.6/site-packages/chardet/langcyrillicmodel.py b/py3/lib/python3.6/site-packages/chardet/langcyrillicmodel.py new file mode 100644 index 0000000..e5f9a1f --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/langcyrillicmodel.py @@ -0,0 +1,333 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# KOI8-R language model +# Character Mapping Table: +KOI8R_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 +223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 +238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 + 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 + 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 + 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 + 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 +) + +win1251_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +) + +latin5_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 
43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +macCyrillic_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, +) + +IBM855_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, +206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, + 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, +220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, +230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, + 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, + 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, +250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, +) + +IBM866_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 
68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 97.6601% +# first 1024 sequences: 2.3389% +# rest sequences: 0.1237% +# negative sequences: 0.0009% +RussianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, +1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, +1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, +2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, +1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, +3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, +1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, +2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, +1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, +1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, +1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, +1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, +3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, +1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, +2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, +1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, +2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, +1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, +1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, +1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, +3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, +3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, +1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, +1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, +0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, +1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, +1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, +0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, +1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, +2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, +1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, +1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, +2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, +1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 
+0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, +1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, +1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, +0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, +0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, +0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, +2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, +0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +) + +Koi8rModel = { + 'char_to_order_map': KOI8R_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "KOI8-R", + 'language': 'Russian', +} + +Win1251CyrillicModel = { + 'char_to_order_map': win1251_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Russian', +} + +Latin5CyrillicModel = { + 'char_to_order_map': latin5_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Russian', +} + +MacCyrillicModel = { + 'char_to_order_map': macCyrillic_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "MacCyrillic", + 'language': 'Russian', +} + +Ibm866Model = { + 'char_to_order_map': IBM866_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM866", + 'language': 'Russian', +} + +Ibm855Model = { + 'char_to_order_map': IBM855_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM855", + 'language': 'Russian', +} diff --git a/py3/lib/python3.6/site-packages/chardet/langgreekmodel.py b/py3/lib/python3.6/site-packages/chardet/langgreekmodel.py new file mode 100644 index 0000000..5332221 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/langgreekmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin7_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 +) + +win1253_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 
24, 64, 75, 19, 26, 27,253, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.2851% +# first 1024 sequences:1.7001% +# rest sequences: 0.0359% +# negative sequences: 0.0148% +GreekLangModel = ( +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, +2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, +2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, +2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, +0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, +3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, +2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, +0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, +0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, +0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, +0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, +0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, +0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, +0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, +0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, +0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, +0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, +0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, +0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, +0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, +0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, +0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, 
+0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, +0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, +0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, +0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, +0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin7GreekModel = { + 'char_to_order_map': Latin7_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-7", + 'language': 'Greek', +} + +Win1253GreekModel = { + 'char_to_order_map': win1253_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "windows-1253", + 'language': 'Greek', +} diff --git a/py3/lib/python3.6/site-packages/chardet/langhebrewmodel.py b/py3/lib/python3.6/site-packages/chardet/langhebrewmodel.py new file mode 100644 index 0000000..58f4c87 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/langhebrewmodel.py @@ -0,0 +1,200 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Simon Montagu +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Shoshannah Forbes - original C code (?) +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Windows-1255 language model +# Character Mapping Table: +WIN1255_CHAR_TO_ORDER_MAP = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 + 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 +253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 + 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 +124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, +215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, + 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, +106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, + 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, +238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, + 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, + 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.4004% +# first 1024 sequences: 1.5981% +# rest sequences: 0.087% +# negative sequences: 0.0015% +HEBREW_LANG_MODEL = ( +0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, +3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, +1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, +1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, +1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, +1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, +0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, +0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, +0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
+3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, +0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, +0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, +0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, +0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, +0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, +0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, +0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, +0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, +0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, +0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, +1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, +1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, +2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, +0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, +0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, +) + +Win1255HebrewModel = { + 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, + 'precedence_matrix': HEBREW_LANG_MODEL, + 'typical_positive_ratio': 0.984004, + 'keep_english_letter': False, + 'charset_name': "windows-1255", + 'language': 'Hebrew', +} diff --git 
a/py3/lib/python3.6/site-packages/chardet/langhungarianmodel.py b/py3/lib/python3.6/site-packages/chardet/langhungarianmodel.py new file mode 100644 index 0000000..bb7c095 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/langhungarianmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin2_HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, +175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, + 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, + 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, +245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +win1250HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, +177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, + 
81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, + 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, +245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 94.7368% +# first 1024 sequences:5.2623% +# rest sequences: 0.8894% +# negative sequences: 0.0009% +HungarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, +3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, +0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, 
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, +1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, +1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, +3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, +2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, +2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, +2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, +2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, +1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, +1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, +3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, +1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, +1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, +2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, +2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, +2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, +3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, +1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, +1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, +1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, +2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, +1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, +2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, 
+2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, +1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, +1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, +0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, +2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, +2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, +1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, +1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, +2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, +2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, +2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, +1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, +0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +) + +Latin2HungarianModel = { + 'char_to_order_map': Latin2_HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-2", + 'language': 'Hungarian', +} + +Win1250HungarianModel = { + 'char_to_order_map': win1250HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "windows-1250", + 'language': 'Hungarian', +} diff --git a/py3/lib/python3.6/site-packages/chardet/langthaimodel.py b/py3/lib/python3.6/site-packages/chardet/langthaimodel.py new file mode 100644 index 0000000..15f94c2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/langthaimodel.py @@ -0,0 +1,199 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# The following result for thai was collected from a limited sample (1M). + +# Character Mapping Table: +TIS620CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 +188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 +253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 + 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 +209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, +223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, +236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, + 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, + 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, + 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, + 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, + 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 92.6386% +# first 1024 sequences:7.3177% +# rest sequences: 1.0230% +# negative sequences: 0.0436% +ThaiLangModel = ( +0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, +0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, +3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, +0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, +3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, +3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, +3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, +3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, +2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, +3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, +1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, +3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, +1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, 
+0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, +0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, +2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, +0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, +3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, +2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, +3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, +2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, +3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, +3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, +3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, +3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, +1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, +0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, +0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, +3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, +3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, +1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, +3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, +3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, +0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, +0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, +1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, +1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, +3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, +0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, +3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, +0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, +0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, +0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, +0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, +0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, +0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, +0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, +0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, +3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, +2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, +0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, +3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, +1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, +1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +TIS620ThaiModel = { + 'char_to_order_map': TIS620CharToOrderMap, + 'precedence_matrix': ThaiLangModel, + 'typical_positive_ratio': 0.926386, + 'keep_english_letter': False, + 'charset_name': "TIS-620", + 'language': 'Thai', +} diff --git a/py3/lib/python3.6/site-packages/chardet/langturkishmodel.py b/py3/lib/python3.6/site-packages/chardet/langturkishmodel.py new file mode 100644 index 0000000..a427a45 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/langturkishmodel.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Özgür Baskın - Turkish Language Model +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin5_TurkishCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, + 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, +255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, + 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, +180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, +164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, +150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, + 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, +124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, + 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, + 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, + 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, +) + +TurkishLangModel = ( +3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, +3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, +3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, +3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, +3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, 
+3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, +2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, +3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, +1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, +3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, +3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, +2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, +2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, +3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, +0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, +3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, +3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, +0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, +1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, +3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, +1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, +3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, +0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, +3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, +1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, +1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, +2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, +2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, +3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, +1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, +0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, +3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, 
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, +0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, +3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, +1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, +2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, +0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, +3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, +0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, +0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, +3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, +0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, +0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, +3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, +0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, +3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, +0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, +0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, +3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, +0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, +3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, +0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, +0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, +0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, +0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, +0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, +0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, +1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, +0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, +0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, +3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, +0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, +2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, +2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, +0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, 
+2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, +0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin5TurkishModel = { + 'char_to_order_map': Latin5_TurkishCharToOrderMap, + 'precedence_matrix': TurkishLangModel, + 'typical_positive_ratio': 0.970290, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-9", + 'language': 'Turkish', +} diff --git a/py3/lib/python3.6/site-packages/chardet/latin1prober.py b/py3/lib/python3.6/site-packages/chardet/latin1prober.py new file mode 100644 index 0000000..7d1e8c2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/latin1prober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +CLASS_NUM = 8 # total classes + +Latin1_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 + OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F + UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 + OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF + ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 + ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF + ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 + ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF + ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 + ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +Latin1ClassModel = ( +# UDF OTH ASC ASS ACV ACO ASV ASO + 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, # ASS + 0, 3, 3, 3, 1, 2, 1, 2, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, # ASO +) + + +class Latin1Prober(CharSetProber): + def __init__(self): + super(Latin1Prober, self).__init__() + self._last_char_class = None + self._freq_counter = None + self.reset() + + def reset(self): + self._last_char_class = OTH + self._freq_counter = [0] * FREQ_CAT_NUM + CharSetProber.reset(self) + + @property + def charset_name(self): + return "ISO-8859-1" + + @property + def language(self): + return "" + + def feed(self, byte_str): + byte_str = self.filter_with_english_letters(byte_str) + for c in byte_str: + char_class = Latin1_CharToClass[c] + freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) + + char_class] + if freq == 0: + self._state = ProbingState.NOT_ME + break + self._freq_counter[freq] += 1 + self._last_char_class = char_class + + return self.state + + def get_confidence(self): + if self.state == 
ProbingState.NOT_ME: + return 0.01 + + total = sum(self._freq_counter) + if total < 0.01: + confidence = 0.0 + else: + confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) + / total) + if confidence < 0.0: + confidence = 0.0 + # lower the confidence of latin1 so that other more accurate + # detector can take priority. + confidence = confidence * 0.73 + return confidence diff --git a/py3/lib/python3.6/site-packages/chardet/mbcharsetprober.py b/py3/lib/python3.6/site-packages/chardet/mbcharsetprober.py new file mode 100644 index 0000000..6256ecf --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/mbcharsetprober.py @@ -0,0 +1,91 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState + + +class MultiByteCharSetProber(CharSetProber): + """ + MultiByteCharSetProber + """ + + def __init__(self, lang_filter=None): + super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) + self.distribution_analyzer = None + self.coding_sm = None + self._last_char = [0, 0] + + def reset(self): + super(MultiByteCharSetProber, self).reset() + if self.coding_sm: + self.coding_sm.reset() + if self.distribution_analyzer: + self.distribution_analyzer.reset() + self._last_char = [0, 0] + + @property + def charset_name(self): + raise NotImplementedError + + @property + def language(self): + raise NotImplementedError + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.distribution_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return 
self.state + + def get_confidence(self): + return self.distribution_analyzer.get_confidence() diff --git a/py3/lib/python3.6/site-packages/chardet/mbcsgroupprober.py b/py3/lib/python3.6/site-packages/chardet/mbcsgroupprober.py new file mode 100644 index 0000000..530abe7 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/mbcsgroupprober.py @@ -0,0 +1,54 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .utf8prober import UTF8Prober +from .sjisprober import SJISProber +from .eucjpprober import EUCJPProber +from .gb2312prober import GB2312Prober +from .euckrprober import EUCKRProber +from .cp949prober import CP949Prober +from .big5prober import Big5Prober +from .euctwprober import EUCTWProber + + +class MBCSGroupProber(CharSetGroupProber): + def __init__(self, lang_filter=None): + super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) + self.probers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber() + ] + self.reset() diff --git a/py3/lib/python3.6/site-packages/chardet/mbcssm.py b/py3/lib/python3.6/site-packages/chardet/mbcssm.py new file mode 100644 index 0000000..8360d0f --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/mbcssm.py @@ -0,0 +1,572 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +# BIG5 + +BIG5_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 4,4,4,4,4,4,4,4, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 4,3,3,3,3,3,3,3, # a0 - a7 + 3,3,3,3,3,3,3,3, # a8 - af + 3,3,3,3,3,3,3,3, # b0 - b7 + 3,3,3,3,3,3,3,3, # b8 - bf + 3,3,3,3,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +BIG5_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 +) + +BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) + +BIG5_SM_MODEL = {'class_table': BIG5_CLS, + 'class_factor': 5, + 'state_table': BIG5_ST, + 'char_len_table': BIG5_CHAR_LEN_TABLE, + 'name': 'Big5'} + +# CP949 + +CP949_CLS = ( + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f + 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f + 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f + 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f + 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f + 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f + 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f + 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f + 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af + 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf + 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff +) + +CP949_ST = ( +#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME + 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 +) + +CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) + +CP949_SM_MODEL = {'class_table': CP949_CLS, + 'class_factor': 10, + 'state_table': CP949_ST, + 'char_len_table': CP949_CHAR_LEN_TABLE, + 'name': 'CP949'} + +# EUC-JP + +EUCJP_CLS = ( + 4,4,4,4,4,4,4,4, # 00 - 07 + 4,4,4,4,4,4,5,5, # 08 - 0f + 4,4,4,4,4,4,4,4, # 10 - 17 + 4,4,4,5,4,4,4,4, # 18 - 1f + 4,4,4,4,4,4,4,4, # 20 - 27 + 4,4,4,4,4,4,4,4, # 28 - 2f + 4,4,4,4,4,4,4,4, # 30 - 37 + 4,4,4,4,4,4,4,4, # 38 - 3f + 4,4,4,4,4,4,4,4, # 40 - 47 + 4,4,4,4,4,4,4,4, # 48 - 4f + 4,4,4,4,4,4,4,4, # 50 - 57 + 4,4,4,4,4,4,4,4, # 58 - 5f + 4,4,4,4,4,4,4,4, # 60 - 67 + 4,4,4,4,4,4,4,4, # 68 - 6f + 4,4,4,4,4,4,4,4, # 70 - 77 + 4,4,4,4,4,4,4,4, # 78 - 7f + 5,5,5,5,5,5,5,5, # 80 - 87 + 5,5,5,5,5,5,1,3, # 88 - 8f + 5,5,5,5,5,5,5,5, # 90 - 97 + 5,5,5,5,5,5,5,5, # 98 - 9f + 5,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,0,5 # f8 - ff +) + +EUCJP_ST = ( + 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f + 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 +) + +EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) + +EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, + 'class_factor': 6, + 'state_table': EUCJP_ST, + 'char_len_table': EUCJP_CHAR_LEN_TABLE, + 'name': 'EUC-JP'} + +# EUC-KR + +EUCKR_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,3,3,3, # 
a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,3,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 2,2,2,2,2,2,2,2, # e0 - e7 + 2,2,2,2,2,2,2,2, # e8 - ef + 2,2,2,2,2,2,2,2, # f0 - f7 + 2,2,2,2,2,2,2,0 # f8 - ff +) + +EUCKR_ST = ( + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f +) + +EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) + +EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, + 'class_factor': 4, + 'state_table': EUCKR_ST, + 'char_len_table': EUCKR_CHAR_LEN_TABLE, + 'name': 'EUC-KR'} + +# EUC-TW + +EUCTW_CLS = ( + 2,2,2,2,2,2,2,2, # 00 - 07 + 2,2,2,2,2,2,0,0, # 08 - 0f + 2,2,2,2,2,2,2,2, # 10 - 17 + 2,2,2,0,2,2,2,2, # 18 - 1f + 2,2,2,2,2,2,2,2, # 20 - 27 + 2,2,2,2,2,2,2,2, # 28 - 2f + 2,2,2,2,2,2,2,2, # 30 - 37 + 2,2,2,2,2,2,2,2, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,2, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,6,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,3,4,4,4,4,4,4, # a0 - a7 + 5,5,1,1,1,1,1,1, # a8 - af + 1,1,1,1,1,1,1,1, # b0 - b7 + 1,1,1,1,1,1,1,1, # b8 - bf + 1,1,3,1,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +EUCTW_ST = ( + MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 + MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 + MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) + +EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, + 'class_factor': 7, + 'state_table': EUCTW_ST, + 'char_len_table': EUCTW_CHAR_LEN_TABLE, + 'name': 'x-euc-tw'} + +# GB2312 + +GB2312_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 3,3,3,3,3,3,3,3, # 30 - 37 + 3,3,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,4, # 78 - 7f + 5,6,6,6,6,6,6,6, # 80 - 87 + 6,6,6,6,6,6,6,6, # 88 - 8f + 6,6,6,6,6,6,6,6, # 90 - 97 + 6,6,6,6,6,6,6,6, # 98 - 9f + 6,6,6,6,6,6,6,6, # a0 - a7 + 
6,6,6,6,6,6,6,6, # a8 - af + 6,6,6,6,6,6,6,6, # b0 - b7 + 6,6,6,6,6,6,6,6, # b8 - bf + 6,6,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 6,6,6,6,6,6,6,6, # e0 - e7 + 6,6,6,6,6,6,6,6, # e8 - ef + 6,6,6,6,6,6,6,6, # f0 - f7 + 6,6,6,6,6,6,6,0 # f8 - ff +) + +GB2312_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 + 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +# To be accurate, the length of class 6 can be either 2 or 4. +# But it is not necessary to discriminate between the two since +# it is used for frequency analysis only, and we are validating +# each code range there as well. So it is safe to set it to be +# 2 here. +GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) + +GB2312_SM_MODEL = {'class_table': GB2312_CLS, + 'class_factor': 7, + 'state_table': GB2312_ST, + 'char_len_table': GB2312_CHAR_LEN_TABLE, + 'name': 'GB2312'} + +# Shift_JIS + +SJIS_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 3,3,3,3,3,2,2,3, # 80 - 87 + 3,3,3,3,3,3,3,3, # 88 - 8f + 3,3,3,3,3,3,3,3, # 90 - 97 + 3,3,3,3,3,3,3,3, # 98 - 9f + #0xa0 is illegal in sjis encoding, but some pages does + #contain such byte. We need to be more error forgiven. 
+ 2,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,4,4,4, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,0,0,0) # f8 - ff + + +SJIS_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 +) + +SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) + +SJIS_SM_MODEL = {'class_table': SJIS_CLS, + 'class_factor': 6, + 'state_table': SJIS_ST, + 'char_len_table': SJIS_CHAR_LEN_TABLE, + 'name': 'Shift_JIS'} + +# UCS2-BE + +UCS2BE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2BE_ST = ( + 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 + 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f + 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 + 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f + 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) + +UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, + 'class_factor': 6, + 'state_table': UCS2BE_ST, + 'char_len_table': UCS2BE_CHAR_LEN_TABLE, + 'name': 'UTF-16BE'} + +# UCS2-LE + +UCS2LE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 
0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2LE_ST = ( + 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f + 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 + 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) + +UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, + 'class_factor': 6, + 'state_table': UCS2LE_ST, + 'char_len_table': UCS2LE_CHAR_LEN_TABLE, + 'name': 'UTF-16LE'} + +# UTF-8 + +UTF8_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 2,2,2,2,3,3,3,3, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 5,5,5,5,5,5,5,5, # a0 - a7 + 5,5,5,5,5,5,5,5, # a8 - af + 5,5,5,5,5,5,5,5, # b0 - b7 + 5,5,5,5,5,5,5,5, # b8 - bf + 0,0,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 7,8,8,8,8,8,8,8, # e0 - e7 + 8,8,8,8,8,9,8,8, # e8 - ef + 10,11,11,11,11,11,11,11, # f0 - f7 + 12,13,13,13,14,15,0,0 # f8 - ff +) + +UTF8_ST = ( + MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 + 9, 11, 8, 7, 6, 5, 4, 3,#08-0f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f + MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f + MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f + MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f + MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af + MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf +) + +UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) + +UTF8_SM_MODEL = {'class_table': UTF8_CLS, + 'class_factor': 16, + 'state_table': UTF8_ST, + 'char_len_table': UTF8_CHAR_LEN_TABLE, + 'name': 'UTF-8'} diff --git a/py3/lib/python3.6/site-packages/chardet/sbcharsetprober.py b/py3/lib/python3.6/site-packages/chardet/sbcharsetprober.py new file mode 100644 index 0000000..0adb51d --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/sbcharsetprober.py @@ -0,0 +1,132 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
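Each *_SM_MODEL dict above bundles a byte-class table, a state table, and a per-class character-length table that CodingStateMachine steps through one byte at a time. A rough walk-through of driving the UTF-8 model directly, mirroring what UTF8Prober.feed does later in this diff (a sketch only, assuming the vendored chardet 3.0.4 modules; the sample string is hypothetical):

from chardet.codingstatemachine import CodingStateMachine
from chardet.enums import MachineState
from chardet.mbcssm import UTF8_SM_MODEL

sm = CodingStateMachine(UTF8_SM_MODEL)
for byte in "héllo".encode("utf-8"):
    state = sm.next_state(byte)
    if state == MachineState.ERROR:
        print("not valid UTF-8")
        break
    if state == MachineState.START:
        # a complete code point was just consumed
        print("consumed a character of length", sm.get_current_charlen())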
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import CharacterCategory, ProbingState, SequenceLikelihood + + +class SingleByteCharSetProber(CharSetProber): + SAMPLE_SIZE = 64 + SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 + POSITIVE_SHORTCUT_THRESHOLD = 0.95 + NEGATIVE_SHORTCUT_THRESHOLD = 0.05 + + def __init__(self, model, reversed=False, name_prober=None): + super(SingleByteCharSetProber, self).__init__() + self._model = model + # TRUE if we need to reverse every pair in the model lookup + self._reversed = reversed + # Optional auxiliary prober for name decision + self._name_prober = name_prober + self._last_order = None + self._seq_counters = None + self._total_seqs = None + self._total_char = None + self._freq_char = None + self.reset() + + def reset(self): + super(SingleByteCharSetProber, self).reset() + # char order of last character + self._last_order = 255 + self._seq_counters = [0] * SequenceLikelihood.get_num_categories() + self._total_seqs = 0 + self._total_char = 0 + # characters that fall in our sampling range + self._freq_char = 0 + + @property + def charset_name(self): + if self._name_prober: + return self._name_prober.charset_name + else: + return self._model['charset_name'] + + @property + def language(self): + if self._name_prober: + return self._name_prober.language + else: + return self._model.get('language') + + def feed(self, byte_str): + if not self._model['keep_english_letter']: + byte_str = self.filter_international_words(byte_str) + if not byte_str: + return self.state + char_to_order_map = self._model['char_to_order_map'] + for i, c in enumerate(byte_str): + # XXX: Order is in range 1-64, so one would think we want 0-63 here, + # but that leads to 27 more test failures than before. + order = char_to_order_map[c] + # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but + # CharacterCategory.SYMBOL is actually 253, so we use CONTROL + # to make it closer to the original intent. The only difference + # is whether or not we count digits and control characters for + # _total_char purposes. 
+ if order < CharacterCategory.CONTROL: + self._total_char += 1 + if order < self.SAMPLE_SIZE: + self._freq_char += 1 + if self._last_order < self.SAMPLE_SIZE: + self._total_seqs += 1 + if not self._reversed: + i = (self._last_order * self.SAMPLE_SIZE) + order + model = self._model['precedence_matrix'][i] + else: # reverse the order of the letters in the lookup + i = (order * self.SAMPLE_SIZE) + self._last_order + model = self._model['precedence_matrix'][i] + self._seq_counters[model] += 1 + self._last_order = order + + charset_name = self._model['charset_name'] + if self.state == ProbingState.DETECTING: + if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, we have a winner', + charset_name, confidence) + self._state = ProbingState.FOUND_IT + elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, below negative ' + 'shortcut threshhold %s', charset_name, + confidence, + self.NEGATIVE_SHORTCUT_THRESHOLD) + self._state = ProbingState.NOT_ME + + return self.state + + def get_confidence(self): + r = 0.01 + if self._total_seqs > 0: + r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / + self._total_seqs / self._model['typical_positive_ratio']) + r = r * self._freq_char / self._total_char + if r >= 1.0: + r = 0.99 + return r diff --git a/py3/lib/python3.6/site-packages/chardet/sbcsgroupprober.py b/py3/lib/python3.6/site-packages/chardet/sbcsgroupprober.py new file mode 100644 index 0000000..98e95dc --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/sbcsgroupprober.py @@ -0,0 +1,73 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
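SingleByteCharSetProber, completed above, maps each byte to a frequency order from the language model, tallies how likely consecutive pairs are under that model's precedence matrix, and then scales the positive-pair ratio by the fraction of frequent characters seen. A minimal sketch of driving it directly with one of the bundled models (hypothetical sample text, assuming the vendored chardet 3.0.4 package is importable):

from chardet.sbcharsetprober import SingleByteCharSetProber
from chardet.langcyrillicmodel import Win1251CyrillicModel

prober = SingleByteCharSetProber(Win1251CyrillicModel)
prober.feed("Привет, мир".encode("windows-1251"))
# A short sample usually stays in the DETECTING state; just print the running score.
print(prober.charset_name, prober.get_confidence())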
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .sbcharsetprober import SingleByteCharSetProber +from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, + Latin5CyrillicModel, MacCyrillicModel, + Ibm866Model, Ibm855Model) +from .langgreekmodel import Latin7GreekModel, Win1253GreekModel +from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel +# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel +from .langthaimodel import TIS620ThaiModel +from .langhebrewmodel import Win1255HebrewModel +from .hebrewprober import HebrewProber +from .langturkishmodel import Latin5TurkishModel + + +class SBCSGroupProber(CharSetGroupProber): + def __init__(self): + super(SBCSGroupProber, self).__init__() + self.probers = [ + SingleByteCharSetProber(Win1251CyrillicModel), + SingleByteCharSetProber(Koi8rModel), + SingleByteCharSetProber(Latin5CyrillicModel), + SingleByteCharSetProber(MacCyrillicModel), + SingleByteCharSetProber(Ibm866Model), + SingleByteCharSetProber(Ibm855Model), + SingleByteCharSetProber(Latin7GreekModel), + SingleByteCharSetProber(Win1253GreekModel), + SingleByteCharSetProber(Latin5BulgarianModel), + SingleByteCharSetProber(Win1251BulgarianModel), + # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) + # after we retrain model. + # SingleByteCharSetProber(Latin2HungarianModel), + # SingleByteCharSetProber(Win1250HungarianModel), + SingleByteCharSetProber(TIS620ThaiModel), + SingleByteCharSetProber(Latin5TurkishModel), + ] + hebrew_prober = HebrewProber() + logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, + False, hebrew_prober) + visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, + hebrew_prober) + hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) + self.probers.extend([hebrew_prober, logical_hebrew_prober, + visual_hebrew_prober]) + + self.reset() diff --git a/py3/lib/python3.6/site-packages/chardet/sjisprober.py b/py3/lib/python3.6/site-packages/chardet/sjisprober.py new file mode 100644 index 0000000..9e29623 --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/sjisprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
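SBCSGroupProber wires the logical and visual Hebrew probers to a single shared HebrewProber, which picks between windows-1255 (logical order) and ISO-8859-8 (visual order) based on where Hebrew final-letter forms appear in words. A small sketch of querying the group directly (hypothetical sample text, assuming the vendored chardet 3.0.4 package):

from chardet.sbcsgroupprober import SBCSGroupProber

group = SBCSGroupProber()
group.feed("שלום עולם".encode("windows-1255"))
print(group.charset_name, group.get_confidence())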
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import SJISDistributionAnalysis +from .jpcntx import SJISContextAnalysis +from .mbcssm import SJIS_SM_MODEL +from .enums import ProbingState, MachineState + + +class SJISProber(MultiByteCharSetProber): + def __init__(self): + super(SJISProber, self).__init__() + self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) + self.distribution_analyzer = SJISDistributionAnalysis() + self.context_analyzer = SJISContextAnalysis() + self.reset() + + def reset(self): + super(SJISProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return self.context_analyzer.charset_name + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char[2 - char_len:], + char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 + - char_len], char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/py3/lib/python3.6/site-packages/chardet/universaldetector.py b/py3/lib/python3.6/site-packages/chardet/universaldetector.py new file mode 100644 index 0000000..7b4e92d --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/universaldetector.py @@ -0,0 +1,286 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### +""" +Module containing the UniversalDetector detector class, which is the primary +class a user of ``chardet`` should use. + +:author: Mark Pilgrim (initial port to Python) +:author: Shy Shalom (original C code) +:author: Dan Blanchard (major refactoring for 3.0) +:author: Ian Cordasco +""" + + +import codecs +import logging +import re + +from .charsetgroupprober import CharSetGroupProber +from .enums import InputState, LanguageFilter, ProbingState +from .escprober import EscCharSetProber +from .latin1prober import Latin1Prober +from .mbcsgroupprober import MBCSGroupProber +from .sbcsgroupprober import SBCSGroupProber + + +class UniversalDetector(object): + """ + The ``UniversalDetector`` class underlies the ``chardet.detect`` function + and coordinates all of the different charset probers. + + To get a ``dict`` containing an encoding and its confidence, you can simply + run: + + .. code:: + + u = UniversalDetector() + u.feed(some_bytes) + u.close() + detected = u.result + + """ + + MINIMUM_THRESHOLD = 0.20 + HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') + ESC_DETECTOR = re.compile(b'(\033|~{)') + WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') + ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', + 'iso-8859-2': 'Windows-1250', + 'iso-8859-5': 'Windows-1251', + 'iso-8859-6': 'Windows-1256', + 'iso-8859-7': 'Windows-1253', + 'iso-8859-8': 'Windows-1255', + 'iso-8859-9': 'Windows-1254', + 'iso-8859-13': 'Windows-1257'} + + def __init__(self, lang_filter=LanguageFilter.ALL): + self._esc_charset_prober = None + self._charset_probers = [] + self.result = None + self.done = None + self._got_data = None + self._input_state = None + self._last_char = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + self._has_win_bytes = None + self.reset() + + def reset(self): + """ + Reset the UniversalDetector and all of its probers back to their + initial states. This is called by ``__init__``, so you only need to + call this directly in between analyses of different documents. + """ + self.result = {'encoding': None, 'confidence': 0.0, 'language': None} + self.done = False + self._got_data = False + self._has_win_bytes = False + self._input_state = InputState.PURE_ASCII + self._last_char = b'' + if self._esc_charset_prober: + self._esc_charset_prober.reset() + for prober in self._charset_probers: + prober.reset() + + def feed(self, byte_str): + """ + Takes a chunk of a document and feeds it through all of the relevant + charset probers. + + After calling ``feed``, you can check the value of the ``done`` + attribute to see if you need to continue feeding the + ``UniversalDetector`` more data, or if it has made a prediction + (in the ``result`` attribute). + + .. note:: + You should always call ``close`` when you're done feeding in your + document if ``done`` is not already ``True``. 
+ """ + if self.done: + return + + if not len(byte_str): + return + + if not isinstance(byte_str, bytearray): + byte_str = bytearray(byte_str) + + # First check for known BOMs, since these are guaranteed to be correct + if not self._got_data: + # If the data starts with BOM, we know it is UTF + if byte_str.startswith(codecs.BOM_UTF8): + # EF BB BF UTF-8 with BOM + self.result = {'encoding': "UTF-8-SIG", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_UTF32_LE, + codecs.BOM_UTF32_BE)): + # FF FE 00 00 UTF-32, little-endian BOM + # 00 00 FE FF UTF-32, big-endian BOM + self.result = {'encoding': "UTF-32", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\xFE\xFF\x00\x00'): + # FE FF 00 00 UCS-4, unusual octet order BOM (3412) + self.result = {'encoding': "X-ISO-10646-UCS-4-3412", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\x00\x00\xFF\xFE'): + # 00 00 FF FE UCS-4, unusual octet order BOM (2143) + self.result = {'encoding': "X-ISO-10646-UCS-4-2143", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): + # FF FE UTF-16, little endian BOM + # FE FF UTF-16, big endian BOM + self.result = {'encoding': "UTF-16", + 'confidence': 1.0, + 'language': ''} + + self._got_data = True + if self.result['encoding'] is not None: + self.done = True + return + + # If none of those matched and we've only see ASCII so far, check + # for high bytes and escape sequences + if self._input_state == InputState.PURE_ASCII: + if self.HIGH_BYTE_DETECTOR.search(byte_str): + self._input_state = InputState.HIGH_BYTE + elif self._input_state == InputState.PURE_ASCII and \ + self.ESC_DETECTOR.search(self._last_char + byte_str): + self._input_state = InputState.ESC_ASCII + + self._last_char = byte_str[-1:] + + # If we've seen escape sequences, use the EscCharSetProber, which + # uses a simple state machine to check for known escape sequences in + # HZ and ISO-2022 encodings, since those are the only encodings that + # use such sequences. + if self._input_state == InputState.ESC_ASCII: + if not self._esc_charset_prober: + self._esc_charset_prober = EscCharSetProber(self.lang_filter) + if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': + self._esc_charset_prober.charset_name, + 'confidence': + self._esc_charset_prober.get_confidence(), + 'language': + self._esc_charset_prober.language} + self.done = True + # If we've seen high bytes (i.e., those with values greater than 127), + # we need to do more complicated checks using all our multi-byte and + # single-byte probers that are left. The single-byte probers + # use character bigram distributions to determine the encoding, whereas + # the multi-byte probers use a combination of character unigram and + # bigram distributions. 
+ elif self._input_state == InputState.HIGH_BYTE: + if not self._charset_probers: + self._charset_probers = [MBCSGroupProber(self.lang_filter)] + # If we're checking non-CJK encodings, use single-byte prober + if self.lang_filter & LanguageFilter.NON_CJK: + self._charset_probers.append(SBCSGroupProber()) + self._charset_probers.append(Latin1Prober()) + for prober in self._charset_probers: + if prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': prober.charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language} + self.done = True + break + if self.WIN_BYTE_DETECTOR.search(byte_str): + self._has_win_bytes = True + + def close(self): + """ + Stop analyzing the current document and come up with a final + prediction. + + :returns: The ``result`` attribute, a ``dict`` with the keys + `encoding`, `confidence`, and `language`. + """ + # Don't bother with checks if we're already done + if self.done: + return self.result + self.done = True + + if not self._got_data: + self.logger.debug('no data received!') + + # Default to ASCII if it is all we've seen so far + elif self._input_state == InputState.PURE_ASCII: + self.result = {'encoding': 'ascii', + 'confidence': 1.0, + 'language': ''} + + # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD + elif self._input_state == InputState.HIGH_BYTE: + prober_confidence = None + max_prober_confidence = 0.0 + max_prober = None + for prober in self._charset_probers: + if not prober: + continue + prober_confidence = prober.get_confidence() + if prober_confidence > max_prober_confidence: + max_prober_confidence = prober_confidence + max_prober = prober + if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): + charset_name = max_prober.charset_name + lower_charset_name = max_prober.charset_name.lower() + confidence = max_prober.get_confidence() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if self._has_win_bytes: + charset_name = self.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + self.result = {'encoding': charset_name, + 'confidence': confidence, + 'language': max_prober.language} + + # Log all prober confidences if none met MINIMUM_THRESHOLD + if self.logger.getEffectiveLevel() == logging.DEBUG: + if self.result['encoding'] is None: + self.logger.debug('no probers hit minimum threshold') + for group_prober in self._charset_probers: + if not group_prober: + continue + if isinstance(group_prober, CharSetGroupProber): + for prober in group_prober.probers: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + else: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + return self.result diff --git a/py3/lib/python3.6/site-packages/chardet/utf8prober.py b/py3/lib/python3.6/site-packages/chardet/utf8prober.py new file mode 100644 index 0000000..6c3196c --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/utf8prober.py @@ -0,0 +1,82 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
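The feed/close cycle documented in the UniversalDetector docstrings above is the intended incremental interface: feed chunks until done flips to True, then call close to obtain the final verdict. A minimal sketch (not part of this commit; "some_file.bin" is a hypothetical path and the 4096-byte chunk size is arbitrary):

from chardet.universaldetector import UniversalDetector

detector = UniversalDetector()
with open("some_file.bin", "rb") as handle:
    for chunk in iter(lambda: handle.read(4096), b""):
        detector.feed(chunk)
        if detector.done:  # a BOM or a prober already produced a result
            break
detector.close()
print(detector.result)  # {'encoding': ..., 'confidence': ..., 'language': ...}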
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState +from .codingstatemachine import CodingStateMachine +from .mbcssm import UTF8_SM_MODEL + + + +class UTF8Prober(CharSetProber): + ONE_CHAR_PROB = 0.5 + + def __init__(self): + super(UTF8Prober, self).__init__() + self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) + self._num_mb_chars = None + self.reset() + + def reset(self): + super(UTF8Prober, self).reset() + self.coding_sm.reset() + self._num_mb_chars = 0 + + @property + def charset_name(self): + return "utf-8" + + @property + def language(self): + return "" + + def feed(self, byte_str): + for c in byte_str: + coding_state = self.coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + if self.coding_sm.get_current_charlen() >= 2: + self._num_mb_chars += 1 + + if self.state == ProbingState.DETECTING: + if self.get_confidence() > self.SHORTCUT_THRESHOLD: + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + unlike = 0.99 + if self._num_mb_chars < 6: + unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars + return 1.0 - unlike + else: + return unlike diff --git a/py3/lib/python3.6/site-packages/chardet/version.py b/py3/lib/python3.6/site-packages/chardet/version.py new file mode 100644 index 0000000..bb2a34a --- /dev/null +++ b/py3/lib/python3.6/site-packages/chardet/version.py @@ -0,0 +1,9 @@ +""" +This module exists only to simplify retrieving the version number of chardet +from within setup.py and from chardet subpackages. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + +__version__ = "3.0.4" +VERSION = __version__.split('.') diff --git a/py3/lib/python3.6/site-packages/click/__init__.py b/py3/lib/python3.6/site-packages/click/__init__.py new file mode 100644 index 0000000..d3c3366 --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/__init__.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +""" +click +~~~~~ + +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. + +:copyright: © 2014 by the Pallets team. +:license: BSD, see LICENSE.rst for more details. 
+""" + +# Core classes +from .core import Context, BaseCommand, Command, MultiCommand, Group, \ + CommandCollection, Parameter, Option, Argument + +# Globals +from .globals import get_current_context + +# Decorators +from .decorators import pass_context, pass_obj, make_pass_decorator, \ + command, group, argument, option, confirmation_option, \ + password_option, version_option, help_option + +# Types +from .types import ParamType, File, Path, Choice, IntRange, Tuple, \ + DateTime, STRING, INT, FLOAT, BOOL, UUID, UNPROCESSED, FloatRange + +# Utilities +from .utils import echo, get_binary_stream, get_text_stream, open_file, \ + format_filename, get_app_dir, get_os_args + +# Terminal functions +from .termui import prompt, confirm, get_terminal_size, echo_via_pager, \ + progressbar, clear, style, unstyle, secho, edit, launch, getchar, \ + pause + +# Exceptions +from .exceptions import ClickException, UsageError, BadParameter, \ + FileError, Abort, NoSuchOption, BadOptionUsage, BadArgumentUsage, \ + MissingParameter + +# Formatting +from .formatting import HelpFormatter, wrap_text + +# Parsing +from .parser import OptionParser + + +__all__ = [ + # Core classes + 'Context', 'BaseCommand', 'Command', 'MultiCommand', 'Group', + 'CommandCollection', 'Parameter', 'Option', 'Argument', + + # Globals + 'get_current_context', + + # Decorators + 'pass_context', 'pass_obj', 'make_pass_decorator', 'command', 'group', + 'argument', 'option', 'confirmation_option', 'password_option', + 'version_option', 'help_option', + + # Types + 'ParamType', 'File', 'Path', 'Choice', 'IntRange', 'Tuple', + 'DateTime', 'STRING', 'INT', 'FLOAT', 'BOOL', 'UUID', 'UNPROCESSED', + 'FloatRange', + + # Utilities + 'echo', 'get_binary_stream', 'get_text_stream', 'open_file', + 'format_filename', 'get_app_dir', 'get_os_args', + + # Terminal functions + 'prompt', 'confirm', 'get_terminal_size', 'echo_via_pager', + 'progressbar', 'clear', 'style', 'unstyle', 'secho', 'edit', 'launch', + 'getchar', 'pause', + + # Exceptions + 'ClickException', 'UsageError', 'BadParameter', 'FileError', + 'Abort', 'NoSuchOption', 'BadOptionUsage', 'BadArgumentUsage', + 'MissingParameter', + + # Formatting + 'HelpFormatter', 'wrap_text', + + # Parsing + 'OptionParser', +] + + +# Controls if click should emit the warning about the use of unicode +# literals. +disable_unicode_literals_warning = False + + +__version__ = '7.0' diff --git a/py3/lib/python3.6/site-packages/click/_bashcomplete.py b/py3/lib/python3.6/site-packages/click/_bashcomplete.py new file mode 100644 index 0000000..a5f1084 --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/_bashcomplete.py @@ -0,0 +1,293 @@ +import copy +import os +import re + +from .utils import echo +from .parser import split_arg_string +from .core import MultiCommand, Option, Argument +from .types import Choice + +try: + from collections import abc +except ImportError: + import collections as abc + +WORDBREAK = '=' + +# Note, only BASH version 4.4 and later have the nosort option. +COMPLETION_SCRIPT_BASH = ''' +%(complete_func)s() { + local IFS=$'\n' + COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\ + COMP_CWORD=$COMP_CWORD \\ + %(autocomplete_var)s=complete $1 ) ) + return 0 +} + +%(complete_func)setup() { + local COMPLETION_OPTIONS="" + local BASH_VERSION_ARR=(${BASH_VERSION//./ }) + # Only BASH version 4.4 and later have the nosort option. 
+ if [ ${BASH_VERSION_ARR[0]} -gt 4 ] || ([ ${BASH_VERSION_ARR[0]} -eq 4 ] && [ ${BASH_VERSION_ARR[1]} -ge 4 ]); then + COMPLETION_OPTIONS="-o nosort" + fi + + complete $COMPLETION_OPTIONS -F %(complete_func)s %(script_names)s +} + +%(complete_func)setup +''' + +COMPLETION_SCRIPT_ZSH = ''' +%(complete_func)s() { + local -a completions + local -a completions_with_descriptions + local -a response + response=("${(@f)$( env COMP_WORDS=\"${words[*]}\" \\ + COMP_CWORD=$((CURRENT-1)) \\ + %(autocomplete_var)s=\"complete_zsh\" \\ + %(script_names)s )}") + + for key descr in ${(kv)response}; do + if [[ "$descr" == "_" ]]; then + completions+=("$key") + else + completions_with_descriptions+=("$key":"$descr") + fi + done + + if [ -n "$completions_with_descriptions" ]; then + _describe -V unsorted completions_with_descriptions -U -Q + fi + + if [ -n "$completions" ]; then + compadd -U -V unsorted -Q -a completions + fi + compstate[insert]="automenu" +} + +compdef %(complete_func)s %(script_names)s +''' + +_invalid_ident_char_re = re.compile(r'[^a-zA-Z0-9_]') + + +def get_completion_script(prog_name, complete_var, shell): + cf_name = _invalid_ident_char_re.sub('', prog_name.replace('-', '_')) + script = COMPLETION_SCRIPT_ZSH if shell == 'zsh' else COMPLETION_SCRIPT_BASH + return (script % { + 'complete_func': '_%s_completion' % cf_name, + 'script_names': prog_name, + 'autocomplete_var': complete_var, + }).strip() + ';' + + +def resolve_ctx(cli, prog_name, args): + """ + Parse into a hierarchy of contexts. Contexts are connected through the parent variable. + :param cli: command definition + :param prog_name: the program that is running + :param args: full list of args + :return: the final context/command parsed + """ + ctx = cli.make_context(prog_name, args, resilient_parsing=True) + args = ctx.protected_args + ctx.args + while args: + if isinstance(ctx.command, MultiCommand): + if not ctx.command.chain: + cmd_name, cmd, args = ctx.command.resolve_command(ctx, args) + if cmd is None: + return ctx + ctx = cmd.make_context(cmd_name, args, parent=ctx, + resilient_parsing=True) + args = ctx.protected_args + ctx.args + else: + # Walk chained subcommand contexts saving the last one. + while args: + cmd_name, cmd, args = ctx.command.resolve_command(ctx, args) + if cmd is None: + return ctx + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True) + args = sub_ctx.args + ctx = sub_ctx + args = sub_ctx.protected_args + sub_ctx.args + else: + break + return ctx + + +def start_of_option(param_str): + """ + :param param_str: param_str to check + :return: whether or not this is the start of an option declaration (i.e. starts "-" or "--") + """ + return param_str and param_str[:1] == '-' + + +def is_incomplete_option(all_args, cmd_param): + """ + :param all_args: the full original list of args supplied + :param cmd_param: the current command paramter + :return: whether or not the last option declaration (i.e. starts "-" or "--") is incomplete and + corresponds to this cmd_param. 
In other words whether this cmd_param option can still accept + values + """ + if not isinstance(cmd_param, Option): + return False + if cmd_param.is_flag: + return False + last_option = None + for index, arg_str in enumerate(reversed([arg for arg in all_args if arg != WORDBREAK])): + if index + 1 > cmd_param.nargs: + break + if start_of_option(arg_str): + last_option = arg_str + + return True if last_option and last_option in cmd_param.opts else False + + +def is_incomplete_argument(current_params, cmd_param): + """ + :param current_params: the current params and values for this argument as already entered + :param cmd_param: the current command parameter + :return: whether or not the last argument is incomplete and corresponds to this cmd_param. In + other words whether or not the this cmd_param argument can still accept values + """ + if not isinstance(cmd_param, Argument): + return False + current_param_values = current_params[cmd_param.name] + if current_param_values is None: + return True + if cmd_param.nargs == -1: + return True + if isinstance(current_param_values, abc.Iterable) \ + and cmd_param.nargs > 1 and len(current_param_values) < cmd_param.nargs: + return True + return False + + +def get_user_autocompletions(ctx, args, incomplete, cmd_param): + """ + :param ctx: context associated with the parsed command + :param args: full list of args + :param incomplete: the incomplete text to autocomplete + :param cmd_param: command definition + :return: all the possible user-specified completions for the param + """ + results = [] + if isinstance(cmd_param.type, Choice): + # Choices don't support descriptions. + results = [(c, None) + for c in cmd_param.type.choices if str(c).startswith(incomplete)] + elif cmd_param.autocompletion is not None: + dynamic_completions = cmd_param.autocompletion(ctx=ctx, + args=args, + incomplete=incomplete) + results = [c if isinstance(c, tuple) else (c, None) + for c in dynamic_completions] + return results + + +def get_visible_commands_starting_with(ctx, starts_with): + """ + :param ctx: context associated with the parsed command + :starts_with: string that visible commands must start with. + :return: all visible (not hidden) commands that start with starts_with. + """ + for c in ctx.command.list_commands(ctx): + if c.startswith(starts_with): + command = ctx.command.get_command(ctx, c) + if not command.hidden: + yield command + + +def add_subcommand_completions(ctx, incomplete, completions_out): + # Add subcommand completions. 
+ if isinstance(ctx.command, MultiCommand): + completions_out.extend( + [(c.name, c.get_short_help_str()) for c in get_visible_commands_starting_with(ctx, incomplete)]) + + # Walk up the context list and add any other completion possibilities from chained commands + while ctx.parent is not None: + ctx = ctx.parent + if isinstance(ctx.command, MultiCommand) and ctx.command.chain: + remaining_commands = [c for c in get_visible_commands_starting_with(ctx, incomplete) + if c.name not in ctx.protected_args] + completions_out.extend([(c.name, c.get_short_help_str()) for c in remaining_commands]) + + +def get_choices(cli, prog_name, args, incomplete): + """ + :param cli: command definition + :param prog_name: the program that is running + :param args: full list of args + :param incomplete: the incomplete text to autocomplete + :return: all the possible completions for the incomplete + """ + all_args = copy.deepcopy(args) + + ctx = resolve_ctx(cli, prog_name, args) + if ctx is None: + return [] + + # In newer versions of bash long opts with '='s are partitioned, but it's easier to parse + # without the '=' + if start_of_option(incomplete) and WORDBREAK in incomplete: + partition_incomplete = incomplete.partition(WORDBREAK) + all_args.append(partition_incomplete[0]) + incomplete = partition_incomplete[2] + elif incomplete == WORDBREAK: + incomplete = '' + + completions = [] + if start_of_option(incomplete): + # completions for partial options + for param in ctx.command.params: + if isinstance(param, Option) and not param.hidden: + param_opts = [param_opt for param_opt in param.opts + + param.secondary_opts if param_opt not in all_args or param.multiple] + completions.extend([(o, param.help) for o in param_opts if o.startswith(incomplete)]) + return completions + # completion for option values from user supplied values + for param in ctx.command.params: + if is_incomplete_option(all_args, param): + return get_user_autocompletions(ctx, all_args, incomplete, param) + # completion for argument values from user supplied values + for param in ctx.command.params: + if is_incomplete_argument(ctx.params, param): + return get_user_autocompletions(ctx, all_args, incomplete, param) + + add_subcommand_completions(ctx, incomplete, completions) + # Sort before returning so that proper ordering can be enforced in custom types. + return sorted(completions) + + +def do_complete(cli, prog_name, include_descriptions): + cwords = split_arg_string(os.environ['COMP_WORDS']) + cword = int(os.environ['COMP_CWORD']) + args = cwords[1:cword] + try: + incomplete = cwords[cword] + except IndexError: + incomplete = '' + + for item in get_choices(cli, prog_name, args, incomplete): + echo(item[0]) + if include_descriptions: + # ZSH has trouble dealing with empty array parameters when returned from commands, so use a well defined character '_' to indicate no description is present. 
+ echo(item[1] if item[1] else '_') + + return True + + +def bashcomplete(cli, prog_name, complete_var, complete_instr): + if complete_instr.startswith('source'): + shell = 'zsh' if complete_instr == 'source_zsh' else 'bash' + echo(get_completion_script(prog_name, complete_var, shell)) + return True + elif complete_instr == 'complete' or complete_instr == 'complete_zsh': + return do_complete(cli, prog_name, complete_instr == 'complete_zsh') + return False diff --git a/py3/lib/python3.6/site-packages/click/_compat.py b/py3/lib/python3.6/site-packages/click/_compat.py new file mode 100644 index 0000000..937e230 --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/_compat.py @@ -0,0 +1,703 @@ +import re +import io +import os +import sys +import codecs +from weakref import WeakKeyDictionary + + +PY2 = sys.version_info[0] == 2 +CYGWIN = sys.platform.startswith('cygwin') +# Determine local App Engine environment, per Google's own suggestion +APP_ENGINE = ('APPENGINE_RUNTIME' in os.environ and + 'Development/' in os.environ['SERVER_SOFTWARE']) +WIN = sys.platform.startswith('win') and not APP_ENGINE +DEFAULT_COLUMNS = 80 + + +_ansi_re = re.compile(r'\033\[((?:\d|;)*)([a-zA-Z])') + + +def get_filesystem_encoding(): + return sys.getfilesystemencoding() or sys.getdefaultencoding() + + +def _make_text_stream(stream, encoding, errors, + force_readable=False, force_writable=False): + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = 'replace' + return _NonClosingTextIOWrapper(stream, encoding, errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable) + + +def is_ascii_encoding(encoding): + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == 'ascii' + except LookupError: + return False + + +def get_best_encoding(stream): + """Returns the default stream encoding if not found.""" + rv = getattr(stream, 'encoding', None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return 'utf-8' + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + + def __init__(self, stream, encoding, errors, + force_readable=False, force_writable=False, **extra): + self._stream = stream = _FixupStream(stream, force_readable, + force_writable) + io.TextIOWrapper.__init__(self, stream, encoding, errors, **extra) + + # The io module is a place where the Python 3 text behavior + # was forced upon Python 2, so we need to unbreak + # it to look like Python 2. + if PY2: + def write(self, x): + if isinstance(x, str) or is_bytes(x): + try: + self.flush() + except Exception: + pass + return self.buffer.write(str(x)) + return io.TextIOWrapper.write(self, x) + + def writelines(self, lines): + for line in lines: + self.write(line) + + def __del__(self): + try: + self.detach() + except Exception: + pass + + def isatty(self): + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream(object): + """The new io interface needs more from streams than streams + traditionally implement. As such, this fix-up code is necessary in + some circumstances. + + The forcing of readable and writable flags are there because some tools + put badly patched objects on sys (one such offender are certain version + of jupyter notebook). 
+ """ + + def __init__(self, stream, force_readable=False, force_writable=False): + self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable + + def __getattr__(self, name): + return getattr(self._stream, name) + + def read1(self, size): + f = getattr(self._stream, 'read1', None) + if f is not None: + return f(size) + # We only dispatch to readline instead of read in Python 2 as we + # do not want cause problems with the different implementation + # of line buffering. + if PY2: + return self._stream.readline(size) + return self._stream.read(size) + + def readable(self): + if self._force_readable: + return True + x = getattr(self._stream, 'readable', None) + if x is not None: + return x() + try: + self._stream.read(0) + except Exception: + return False + return True + + def writable(self): + if self._force_writable: + return True + x = getattr(self._stream, 'writable', None) + if x is not None: + return x() + try: + self._stream.write('') + except Exception: + try: + self._stream.write(b'') + except Exception: + return False + return True + + def seekable(self): + x = getattr(self._stream, 'seekable', None) + if x is not None: + return x() + try: + self._stream.seek(self._stream.tell()) + except Exception: + return False + return True + + +if PY2: + text_type = unicode + bytes = str + raw_input = raw_input + string_types = (str, unicode) + int_types = (int, long) + iteritems = lambda x: x.iteritems() + range_type = xrange + + def is_bytes(x): + return isinstance(x, (buffer, bytearray)) + + _identifier_re = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$') + + # For Windows, we need to force stdout/stdin/stderr to binary if it's + # fetched for that. This obviously is not the most correct way to do + # it as it changes global state. Unfortunately, there does not seem to + # be a clear better way to do it as just reopening the file in binary + # mode does not change anything. + # + # An option would be to do what Python 3 does and to open the file as + # binary only, patch it back to the system, and then use a wrapper + # stream that converts newlines. It's not quite clear what's the + # correct option here. + # + # This code also lives in _winconsole for the fallback to the console + # emulation stream. + # + # There are also Windows environments where the `msvcrt` module is not + # available (which is why we use try-catch instead of the WIN variable + # here), such as the Google App Engine development server on Windows. In + # those cases there is just nothing we can do. 
+ def set_binary_mode(f): + return f + + try: + import msvcrt + except ImportError: + pass + else: + def set_binary_mode(f): + try: + fileno = f.fileno() + except Exception: + pass + else: + msvcrt.setmode(fileno, os.O_BINARY) + return f + + try: + import fcntl + except ImportError: + pass + else: + def set_binary_mode(f): + try: + fileno = f.fileno() + except Exception: + pass + else: + flags = fcntl.fcntl(fileno, fcntl.F_GETFL) + fcntl.fcntl(fileno, fcntl.F_SETFL, flags & ~os.O_NONBLOCK) + return f + + def isidentifier(x): + return _identifier_re.search(x) is not None + + def get_binary_stdin(): + return set_binary_mode(sys.stdin) + + def get_binary_stdout(): + _wrap_std_stream('stdout') + return set_binary_mode(sys.stdout) + + def get_binary_stderr(): + _wrap_std_stream('stderr') + return set_binary_mode(sys.stderr) + + def get_text_stdin(encoding=None, errors=None): + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _make_text_stream(sys.stdin, encoding, errors, + force_readable=True) + + def get_text_stdout(encoding=None, errors=None): + _wrap_std_stream('stdout') + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _make_text_stream(sys.stdout, encoding, errors, + force_writable=True) + + def get_text_stderr(encoding=None, errors=None): + _wrap_std_stream('stderr') + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _make_text_stream(sys.stderr, encoding, errors, + force_writable=True) + + def filename_to_ui(value): + if isinstance(value, bytes): + value = value.decode(get_filesystem_encoding(), 'replace') + return value +else: + import io + text_type = str + raw_input = input + string_types = (str,) + int_types = (int,) + range_type = range + isidentifier = lambda x: x.isidentifier() + iteritems = lambda x: iter(x.items()) + + def is_bytes(x): + return isinstance(x, (bytes, memoryview, bytearray)) + + def _is_binary_reader(stream, default=False): + try: + return isinstance(stream.read(0), bytes) + except Exception: + return default + # This happens in some cases where the stream was already + # closed. In this case, we assume the default. + + def _is_binary_writer(stream, default=False): + try: + stream.write(b'') + except Exception: + try: + stream.write('') + return False + except Exception: + pass + return default + return True + + def _find_binary_reader(stream): + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_reader(stream, False): + return stream + + buf = getattr(stream, 'buffer', None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_reader(buf, True): + return buf + + def _find_binary_writer(stream): + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detatching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_writer(stream, False): + return stream + + buf = getattr(stream, 'buffer', None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. 
+ if buf is not None and _is_binary_writer(buf, True): + return buf + + def _stream_is_misconfigured(stream): + """A stream is misconfigured if its encoding is ASCII.""" + # If the stream does not have an encoding set, we assume it's set + # to ASCII. This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. + return is_ascii_encoding(getattr(stream, 'encoding', None) or 'ascii') + + def _is_compatible_text_stream(stream, encoding, errors): + stream_encoding = getattr(stream, 'encoding', None) + stream_errors = getattr(stream, 'errors', None) + + # Perfect match. + if stream_encoding == encoding and stream_errors == errors: + return True + + # Otherwise, it's only a compatible stream if we did not ask for + # an encoding. + if encoding is None: + return stream_encoding is not None + + return False + + def _force_correct_text_reader(text_reader, encoding, errors, + force_readable=False): + if _is_binary_reader(text_reader, False): + binary_reader = text_reader + else: + # If there is no target encoding set, we need to verify that the + # reader is not actually misconfigured. + if encoding is None and not _stream_is_misconfigured(text_reader): + return text_reader + + if _is_compatible_text_stream(text_reader, encoding, errors): + return text_reader + + # If the reader has no encoding, we try to find the underlying + # binary reader for it. If that fails because the environment is + # misconfigured, we silently go with the same reader because this + # is too common to happen. In that case, mojibake is better than + # exceptions. + binary_reader = _find_binary_reader(text_reader) + if binary_reader is None: + return text_reader + + # At this point, we default the errors to replace instead of strict + # because nobody handles those errors anyways and at this point + # we're so fundamentally fucked that nothing can repair it. + if errors is None: + errors = 'replace' + return _make_text_stream(binary_reader, encoding, errors, + force_readable=force_readable) + + def _force_correct_text_writer(text_writer, encoding, errors, + force_writable=False): + if _is_binary_writer(text_writer, False): + binary_writer = text_writer + else: + # If there is no target encoding set, we need to verify that the + # writer is not actually misconfigured. + if encoding is None and not _stream_is_misconfigured(text_writer): + return text_writer + + if _is_compatible_text_stream(text_writer, encoding, errors): + return text_writer + + # If the writer has no encoding, we try to find the underlying + # binary writer for it. If that fails because the environment is + # misconfigured, we silently go with the same writer because this + # is too common to happen. In that case, mojibake is better than + # exceptions. + binary_writer = _find_binary_writer(text_writer) + if binary_writer is None: + return text_writer + + # At this point, we default the errors to replace instead of strict + # because nobody handles those errors anyways and at this point + # we're so fundamentally fucked that nothing can repair it. 
+ if errors is None: + errors = 'replace' + return _make_text_stream(binary_writer, encoding, errors, + force_writable=force_writable) + + def get_binary_stdin(): + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError('Was not able to determine binary ' + 'stream for sys.stdin.') + return reader + + def get_binary_stdout(): + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError('Was not able to determine binary ' + 'stream for sys.stdout.') + return writer + + def get_binary_stderr(): + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError('Was not able to determine binary ' + 'stream for sys.stderr.') + return writer + + def get_text_stdin(encoding=None, errors=None): + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader(sys.stdin, encoding, errors, + force_readable=True) + + def get_text_stdout(encoding=None, errors=None): + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stdout, encoding, errors, + force_writable=True) + + def get_text_stderr(encoding=None, errors=None): + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stderr, encoding, errors, + force_writable=True) + + def filename_to_ui(value): + if isinstance(value, bytes): + value = value.decode(get_filesystem_encoding(), 'replace') + else: + value = value.encode('utf-8', 'surrogateescape') \ + .decode('utf-8', 'replace') + return value + + +def get_streerror(e, default=None): + if hasattr(e, 'strerror'): + msg = e.strerror + else: + if default is not None: + msg = default + else: + msg = str(e) + if isinstance(msg, bytes): + msg = msg.decode('utf-8', 'replace') + return msg + + +def open_stream(filename, mode='r', encoding=None, errors='strict', + atomic=False): + # Standard streams first. These are simple because they don't need + # special handling for the atomic flag. It's entirely ignored. + if filename == '-': + if any(m in mode for m in ['w', 'a', 'x']): + if 'b' in mode: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if 'b' in mode: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. + if not atomic: + if encoding is None: + return open(filename, mode), True + return io.open(filename, mode, encoding=encoding, errors=errors), True + + # Some usability stuff for atomic writes + if 'a' in mode: + raise ValueError( + 'Appending to an existing file is not supported, because that ' + 'would involve an expensive `copy`-operation to a temporary ' + 'file. Open the file in normal `w`-mode and copy explicitly ' + 'if that\'s what you\'re after.' + ) + if 'x' in mode: + raise ValueError('Use the `overwrite`-parameter instead.') + if 'w' not in mode: + raise ValueError('Atomic writes only make sense with `w`-mode.') + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. 
+ import tempfile + fd, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename), + prefix='.__atomic-write') + + if encoding is not None: + f = io.open(fd, mode, encoding=encoding, errors=errors) + else: + f = os.fdopen(fd, mode) + + return _AtomicFile(f, tmp_filename, os.path.realpath(filename)), True + + +# Used in a destructor call, needs extra protection from interpreter cleanup. +if hasattr(os, 'replace'): + _replace = os.replace + _can_replace = True +else: + _replace = os.rename + _can_replace = not WIN + + +class _AtomicFile(object): + + def __init__(self, f, tmp_filename, real_filename): + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self): + return self._real_filename + + def close(self, delete=False): + if self.closed: + return + self._f.close() + if not _can_replace: + try: + os.remove(self._real_filename) + except OSError: + pass + _replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name): + return getattr(self._f, name) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + self.close(delete=exc_type is not None) + + def __repr__(self): + return repr(self._f) + + +auto_wrap_for_ansi = None +colorama = None +get_winterm_size = None + + +def strip_ansi(value): + return _ansi_re.sub('', value) + + +def should_strip_ansi(stream=None, color=None): + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) + return not color + + +# If we're on Windows, we provide transparent integration through +# colorama. This will make ANSI colors through the echo function +# work automatically. +if WIN: + # Windows has a smaller terminal + DEFAULT_COLUMNS = 79 + + from ._winconsole import _get_windows_console_stream, _wrap_std_stream + + def _get_argv_encoding(): + import locale + return locale.getpreferredencoding() + + if PY2: + def raw_input(prompt=''): + sys.stderr.flush() + if prompt: + stdout = _default_text_stdout() + stdout.write(prompt) + stdin = _default_text_stdin() + return stdin.readline().rstrip('\r\n') + + try: + import colorama + except ImportError: + pass + else: + _ansi_stream_wrappers = WeakKeyDictionary() + + def auto_wrap_for_ansi(stream, color=None): + """This function wraps a stream so that calls through colorama + are issued to the win32 console API to recolor on demand. It + also ensures to reset the colors if a write call is interrupted + to not destroy the console afterwards. 
+ """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + if cached is not None: + return cached + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = ansi_wrapper.stream + _write = rv.write + + def _safe_write(s): + try: + return _write(s) + except: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + return rv + + def get_winterm_size(): + win = colorama.win32.GetConsoleScreenBufferInfo( + colorama.win32.STDOUT).srWindow + return win.Right - win.Left, win.Bottom - win.Top +else: + def _get_argv_encoding(): + return getattr(sys.stdin, 'encoding', None) or get_filesystem_encoding() + + _get_windows_console_stream = lambda *x: None + _wrap_std_stream = lambda *x: None + + +def term_len(x): + return len(strip_ansi(x)) + + +def isatty(stream): + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func(src_func, wrapper_func): + cache = WeakKeyDictionary() + def func(): + stream = src_func() + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + stream = src_func() # In case wrapper_func() modified the stream + cache[stream] = rv + except Exception: + pass + return rv + return func + + +_default_text_stdin = _make_cached_stream_func( + lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func( + lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func( + lambda: sys.stderr, get_text_stderr) + + +binary_streams = { + 'stdin': get_binary_stdin, + 'stdout': get_binary_stdout, + 'stderr': get_binary_stderr, +} + +text_streams = { + 'stdin': get_text_stdin, + 'stdout': get_text_stdout, + 'stderr': get_text_stderr, +} diff --git a/py3/lib/python3.6/site-packages/click/_termui_impl.py b/py3/lib/python3.6/site-packages/click/_termui_impl.py new file mode 100644 index 0000000..00a8e5e --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/_termui_impl.py @@ -0,0 +1,621 @@ +# -*- coding: utf-8 -*- +""" +click._termui_impl +~~~~~~~~~~~~~~~~~~ + +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. + +:copyright: © 2014 by the Pallets team. +:license: BSD, see LICENSE.rst for more details. 
+""" + +import os +import sys +import time +import math +import contextlib +from ._compat import _default_text_stdout, range_type, PY2, isatty, \ + open_stream, strip_ansi, term_len, get_best_encoding, WIN, int_types, \ + CYGWIN +from .utils import echo +from .exceptions import ClickException + + +if os.name == 'nt': + BEFORE_BAR = '\r' + AFTER_BAR = '\n' +else: + BEFORE_BAR = '\r\033[?25l' + AFTER_BAR = '\033[?25h\n' + + +def _length_hint(obj): + """Returns the length hint of an object.""" + try: + return len(obj) + except (AttributeError, TypeError): + try: + get_hint = type(obj).__length_hint__ + except AttributeError: + return None + try: + hint = get_hint(obj) + except TypeError: + return None + if hint is NotImplemented or \ + not isinstance(hint, int_types) or \ + hint < 0: + return None + return hint + + +class ProgressBar(object): + + def __init__(self, iterable, length=None, fill_char='#', empty_char=' ', + bar_template='%(bar)s', info_sep=' ', show_eta=True, + show_percent=None, show_pos=False, item_show_func=None, + label=None, file=None, color=None, width=30): + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label = label or '' + if file is None: + file = _default_text_stdout() + self.file = file + self.color = color + self.width = width + self.autowidth = width == 0 + + if length is None: + length = _length_hint(iterable) + if iterable is None: + if length is None: + raise TypeError('iterable or length is required') + iterable = range_type(length) + self.iter = iter(iterable) + self.length = length + self.length_known = length is not None + self.pos = 0 + self.avg = [] + self.start = self.last_eta = time.time() + self.eta_known = False + self.finished = False + self.max_width = None + self.entered = False + self.current_item = None + self.is_hidden = not isatty(self.file) + self._last_line = None + self.short_limit = 0.5 + + def __enter__(self): + self.entered = True + self.render_progress() + return self + + def __exit__(self, exc_type, exc_value, tb): + self.render_finish() + + def __iter__(self): + if not self.entered: + raise RuntimeError('You need to use progress bars in a with block.') + self.render_progress() + return self.generator() + + def is_fast(self): + return time.time() - self.start <= self.short_limit + + def render_finish(self): + if self.is_hidden or self.is_fast(): + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self): + if self.finished: + return 1.0 + return min(self.pos / (float(self.length) or 1), 1.0) + + @property + def time_per_iteration(self): + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self): + if self.length_known and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self): + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + days = t + return '%dd %02d:%02d:%02d' % (days, hours, minutes, seconds) + else: + return '%02d:%02d:%02d' % (hours, minutes, seconds) + return '' + + def format_pos(self): + pos = str(self.pos) + if self.length_known: + pos += '/%s' % self.length + return pos + + def format_pct(self): + return ('% 4d%%' % int(self.pct * 100))[1:] + + def format_bar(self): + if self.length_known: + 
bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + bar = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + bar[int((math.cos(self.pos * self.time_per_iteration) + / 2.0 + 0.5) * self.width)] = self.fill_char + bar = ''.join(bar) + return bar + + def format_progress_line(self): + show_percent = self.show_percent + + info_bits = [] + if self.length_known and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return (self.bar_template % { + 'label': self.label, + 'bar': self.format_bar(), + 'info': self.info_sep.join(info_bits) + }).rstrip() + + def render_progress(self): + from .termui import get_terminal_size + + if self.is_hidden: + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, get_terminal_size()[0] - clutter_length) + if new_width < old_width: + buf.append(BEFORE_BAR) + buf.append(' ' * self.max_width) + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(' ' * (clear_width - line_len)) + line = ''.join(buf) + # Render the line only if it changed. + + if line != self._last_line and not self.is_fast(): + self._last_line = line + echo(line, file=self.file, color=self.color, nl=False) + self.file.flush() + + def make_step(self, n_steps): + self.pos += n_steps + if self.length_known and self.pos >= self.length: + self.finished = True + + if (time.time() - self.last_eta) < 1.0: + return + + self.last_eta = time.time() + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. + if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] + + self.eta_known = self.length_known + + def update(self, n_steps): + self.make_step(n_steps) + self.render_progress() + + def finish(self): + self.eta_known = 0 + self.current_item = None + self.finished = True + + def generator(self): + """ + Returns a generator which yields the items added to the bar during + construction, and updates the progress bar *after* the yielded block + returns. 
+ """ + if not self.entered: + raise RuntimeError('You need to use progress bars in a with block.') + + if self.is_hidden: + for rv in self.iter: + yield rv + else: + for rv in self.iter: + self.current_item = rv + yield rv + self.update(1) + self.finish() + self.render_progress() + + +def pager(generator, color=None): + """Decide what method to use for paging through text.""" + stdout = _default_text_stdout() + if not isatty(sys.stdin) or not isatty(stdout): + return _nullpager(stdout, generator, color) + pager_cmd = (os.environ.get('PAGER', None) or '').strip() + if pager_cmd: + if WIN: + return _tempfilepager(generator, pager_cmd, color) + return _pipepager(generator, pager_cmd, color) + if os.environ.get('TERM') in ('dumb', 'emacs'): + return _nullpager(stdout, generator, color) + if WIN or sys.platform.startswith('os2'): + return _tempfilepager(generator, 'more <', color) + if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0: + return _pipepager(generator, 'less', color) + + import tempfile + fd, filename = tempfile.mkstemp() + os.close(fd) + try: + if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0: + return _pipepager(generator, 'more', color) + return _nullpager(stdout, generator, color) + finally: + os.unlink(filename) + + +def _pipepager(generator, cmd, color): + """Page through text by feeding it to another program. Invoking a + pager through this might support colors. + """ + import subprocess + env = dict(os.environ) + + # If we're piping to less we might support colors under the + # condition that + cmd_detail = cmd.rsplit('/', 1)[-1].split() + if color is None and cmd_detail[0] == 'less': + less_flags = os.environ.get('LESS', '') + ' '.join(cmd_detail[1:]) + if not less_flags: + env['LESS'] = '-R' + color = True + elif 'r' in less_flags or 'R' in less_flags: + color = True + + c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, + env=env) + encoding = get_best_encoding(c.stdin) + try: + for text in generator: + if not color: + text = strip_ansi(text) + + c.stdin.write(text.encode(encoding, 'replace')) + except (IOError, KeyboardInterrupt): + pass + else: + c.stdin.close() + + # Less doesn't respect ^C, but catches it for its own UI purposes (aborting + # search or other commands inside less). + # + # That means when the user hits ^C, the parent process (click) terminates, + # but less is still alive, paging the output and messing up the terminal. + # + # If the user wants to make the pager exit on ^C, they should set + # `LESS='-K'`. It's not our decision to make. + while True: + try: + c.wait() + except KeyboardInterrupt: + pass + else: + break + + +def _tempfilepager(generator, cmd, color): + """Page through text by invoking a program on a temporary file.""" + import tempfile + filename = tempfile.mktemp() + # TODO: This never terminates if the passed generator never terminates. + text = "".join(generator) + if not color: + text = strip_ansi(text) + encoding = get_best_encoding(sys.stdout) + with open_stream(filename, 'wb')[0] as f: + f.write(text.encode(encoding)) + try: + os.system(cmd + ' "' + filename + '"') + finally: + os.unlink(filename) + + +def _nullpager(stream, generator, color): + """Simply print unformatted text. 
This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor(object): + + def __init__(self, editor=None, env=None, require_save=True, + extension='.txt'): + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self): + if self.editor is not None: + return self.editor + for key in 'VISUAL', 'EDITOR': + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return 'notepad' + for editor in 'vim', 'nano': + if os.system('which %s >/dev/null 2>&1' % editor) == 0: + return editor + return 'vi' + + def edit_file(self, filename): + import subprocess + editor = self.get_editor() + if self.env: + environ = os.environ.copy() + environ.update(self.env) + else: + environ = None + try: + c = subprocess.Popen('%s "%s"' % (editor, filename), + env=environ, shell=True) + exit_code = c.wait() + if exit_code != 0: + raise ClickException('%s: Editing failed!' % editor) + except OSError as e: + raise ClickException('%s: Editing failed: %s' % (editor, e)) + + def edit(self, text): + import tempfile + + text = text or '' + if text and not text.endswith('\n'): + text += '\n' + + fd, name = tempfile.mkstemp(prefix='editor-', suffix=self.extension) + try: + if WIN: + encoding = 'utf-8-sig' + text = text.replace('\n', '\r\n') + else: + encoding = 'utf-8' + text = text.encode(encoding) + + f = os.fdopen(fd, 'wb') + f.write(text) + f.close() + timestamp = os.path.getmtime(name) + + self.edit_file(name) + + if self.require_save \ + and os.path.getmtime(name) == timestamp: + return None + + f = open(name, 'rb') + try: + rv = f.read() + finally: + f.close() + return rv.decode('utf-8-sig').replace('\r\n', '\n') + finally: + os.unlink(name) + + +def open_url(url, wait=False, locate=False): + import subprocess + + def _unquote_file(url): + try: + import urllib + except ImportError: + import urllib + if url.startswith('file://'): + url = urllib.unquote(url[7:]) + return url + + if sys.platform == 'darwin': + args = ['open'] + if wait: + args.append('-W') + if locate: + args.append('-R') + args.append(_unquote_file(url)) + null = open('/dev/null', 'w') + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url) + args = 'explorer /select,"%s"' % _unquote_file( + url.replace('"', '')) + else: + args = 'start %s "" "%s"' % ( + wait and '/WAIT' or '', url.replace('"', '')) + return os.system(args) + elif CYGWIN: + if locate: + url = _unquote_file(url) + args = 'cygstart "%s"' % (os.path.dirname(url).replace('"', '')) + else: + args = 'cygstart %s "%s"' % ( + wait and '-w' or '', url.replace('"', '')) + return os.system(args) + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or '.' + else: + url = _unquote_file(url) + c = subprocess.Popen(['xdg-open', url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(('http://', 'https://')) and not locate and not wait: + import webbrowser + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch): + if ch == u'\x03': + raise KeyboardInterrupt() + if ch == u'\x04' and not WIN: # Unix-like, Ctrl+D + raise EOFError() + if ch == u'\x1a' and WIN: # Windows, Ctrl+Z + raise EOFError() + + +if WIN: + import msvcrt + + @contextlib.contextmanager + def raw_terminal(): + yield + + def getchar(echo): + # The function `getch` will return a bytes object corresponding to + # the pressed character. 
Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. + # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. + if echo: + func = msvcrt.getwche + else: + func = msvcrt.getwch + + rv = func() + if rv in (u'\x00', u'\xe0'): + # \x00 and \xe0 are control characters that indicate special key, + # see above. + rv += func() + _translate_ch_to_exc(rv) + return rv +else: + import tty + import termios + + @contextlib.contextmanager + def raw_terminal(): + if not isatty(sys.stdin): + f = open('/dev/tty') + fd = f.fileno() + else: + fd = sys.stdin.fileno() + f = None + try: + old_settings = termios.tcgetattr(fd) + try: + tty.setraw(fd) + yield fd + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + sys.stdout.flush() + if f is not None: + f.close() + except termios.error: + pass + + def getchar(echo): + with raw_terminal() as fd: + ch = os.read(fd, 32) + ch = ch.decode(get_best_encoding(sys.stdin), 'replace') + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + _translate_ch_to_exc(ch) + return ch diff --git a/py3/lib/python3.6/site-packages/click/_textwrap.py b/py3/lib/python3.6/site-packages/click/_textwrap.py new file mode 100644 index 0000000..7e77603 --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/_textwrap.py @@ -0,0 +1,38 @@ +import textwrap +from contextlib import contextmanager + + +class TextWrapper(textwrap.TextWrapper): + + def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): + space_left = max(width - cur_len, 1) + + if self.break_long_words: + last = reversed_chunks[-1] + cut = last[:space_left] + res = last[space_left:] + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + @contextmanager + def extra_indent(self, indent): + old_initial_indent = self.initial_indent + old_subsequent_indent = self.subsequent_indent + self.initial_indent += indent + self.subsequent_indent += indent + try: + yield + finally: + self.initial_indent = old_initial_indent + self.subsequent_indent = old_subsequent_indent + + def indent_only(self, text): + rv = [] + for idx, line in enumerate(text.splitlines()): + indent = 
self.initial_indent + if idx > 0: + indent = self.subsequent_indent + rv.append(indent + line) + return '\n'.join(rv) diff --git a/py3/lib/python3.6/site-packages/click/_unicodefun.py b/py3/lib/python3.6/site-packages/click/_unicodefun.py new file mode 100644 index 0000000..620edff --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/_unicodefun.py @@ -0,0 +1,125 @@ +import os +import sys +import codecs + +from ._compat import PY2 + + +# If someone wants to vendor click, we want to ensure the +# correct package is discovered. Ideally we could use a +# relative import here but unfortunately Python does not +# support that. +click = sys.modules[__name__.rsplit('.', 1)[0]] + + +def _find_unicode_literals_frame(): + import __future__ + if not hasattr(sys, '_getframe'): # not all Python implementations have it + return 0 + frm = sys._getframe(1) + idx = 1 + while frm is not None: + if frm.f_globals.get('__name__', '').startswith('click.'): + frm = frm.f_back + idx += 1 + elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag: + return idx + else: + break + return 0 + + +def _check_for_unicode_literals(): + if not __debug__: + return + if not PY2 or click.disable_unicode_literals_warning: + return + bad_frame = _find_unicode_literals_frame() + if bad_frame <= 0: + return + from warnings import warn + warn(Warning('Click detected the use of the unicode_literals ' + '__future__ import. This is heavily discouraged ' + 'because it can introduce subtle bugs in your ' + 'code. You should instead use explicit u"" literals ' + 'for your unicode strings. For more information see ' + 'https://click.palletsprojects.com/python3/'), + stacklevel=bad_frame) + + +def _verify_python3_env(): + """Ensures that the environment is good for unicode on Python 3.""" + if PY2: + return + try: + import locale + fs_enc = codecs.lookup(locale.getpreferredencoding()).name + except Exception: + fs_enc = 'ascii' + if fs_enc != 'ascii': + return + + extra = '' + if os.name == 'posix': + import subprocess + try: + rv = subprocess.Popen(['locale', '-a'], stdout=subprocess.PIPE, + stderr=subprocess.PIPE).communicate()[0] + except OSError: + rv = b'' + good_locales = set() + has_c_utf8 = False + + # Make sure we're operating on text here. + if isinstance(rv, bytes): + rv = rv.decode('ascii', 'replace') + + for line in rv.splitlines(): + locale = line.strip() + if locale.lower().endswith(('.utf-8', '.utf8')): + good_locales.add(locale) + if locale.lower() in ('c.utf8', 'c.utf-8'): + has_c_utf8 = True + + extra += '\n\n' + if not good_locales: + extra += ( + 'Additional information: on this system no suitable UTF-8\n' + 'locales were discovered. This most likely requires resolving\n' + 'by reconfiguring the locale system.' + ) + elif has_c_utf8: + extra += ( + 'This system supports the C.UTF-8 locale which is recommended.\n' + 'You might be able to resolve your issue by exporting the\n' + 'following environment variables:\n\n' + ' export LC_ALL=C.UTF-8\n' + ' export LANG=C.UTF-8' + ) + else: + extra += ( + 'This system lists a couple of UTF-8 supporting locales that\n' + 'you can pick from. 
The following suitable locales were\n' + 'discovered: %s' + ) % ', '.join(sorted(good_locales)) + + bad_locale = None + for locale in os.environ.get('LC_ALL'), os.environ.get('LANG'): + if locale and locale.lower().endswith(('.utf-8', '.utf8')): + bad_locale = locale + if locale is not None: + break + if bad_locale is not None: + extra += ( + '\n\nClick discovered that you exported a UTF-8 locale\n' + 'but the locale system could not pick up from it because\n' + 'it does not exist. The exported locale is "%s" but it\n' + 'is not supported' + ) % bad_locale + + raise RuntimeError( + 'Click will abort further execution because Python 3 was' + ' configured to use ASCII as encoding for the environment.' + ' Consult https://click.palletsprojects.com/en/7.x/python3/ for' + ' mitigation steps.' + extra + ) diff --git a/py3/lib/python3.6/site-packages/click/_winconsole.py b/py3/lib/python3.6/site-packages/click/_winconsole.py new file mode 100644 index 0000000..bbb080d --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/_winconsole.py @@ -0,0 +1,307 @@ +# -*- coding: utf-8 -*- +# This module is based on the excellent work by Adam Bartoš who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. +# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prmopt. + +import io +import os +import sys +import zlib +import time +import ctypes +import msvcrt +from ._compat import _NonClosingTextIOWrapper, text_type, PY2 +from ctypes import byref, POINTER, c_int, c_char, c_char_p, \ + c_void_p, py_object, c_ssize_t, c_ulong, windll, WINFUNCTYPE +try: + from ctypes import pythonapi + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release +except ImportError: + pythonapi = None +from ctypes.wintypes import LPWSTR, LPCWSTR + + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)( + ('GetCommandLineW', windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE( + POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ('CommandLineToArgvW', windll.shell32)) + + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b'\x1a' +MAX_BYTES_WRITTEN = 32767 + + +class Py_buffer(ctypes.Structure): + _fields_ = [ + ('buf', c_void_p), + ('obj', py_object), + ('len', c_ssize_t), + ('itemsize', c_ssize_t), + ('readonly', c_int), + ('ndim', c_int), + ('format', c_char_p), + ('shape', c_ssize_p), + ('strides', c_ssize_p), + ('suboffsets', c_ssize_p), + ('internal', c_void_p) + ] + + if PY2: + _fields_.insert(-1, ('smalltable', c_ssize_t * 2)) + + +# On PyPy we cannot get buffers so our ability to operate here is +# serverly limited. 
+if pythonapi is None: + get_buffer = None +else: + def get_buffer(obj, writable=False): + buf = Py_buffer() + flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE + PyObject_GetBuffer(py_object(obj), byref(buf), flags) + try: + buffer_type = c_char * buf.len + return buffer_type.from_address(buf.buf) + finally: + PyBuffer_Release(byref(buf)) + + +class _WindowsConsoleRawIOBase(io.RawIOBase): + + def __init__(self, handle): + self.handle = handle + + def isatty(self): + io.RawIOBase.isatty(self) + return True + + +class _WindowsConsoleReader(_WindowsConsoleRawIOBase): + + def readable(self): + return True + + def readinto(self, b): + bytes_to_be_read = len(b) + if not bytes_to_be_read: + return 0 + elif bytes_to_be_read % 2: + raise ValueError('cannot read odd number of bytes from ' + 'UTF-16-LE encoded console') + + buffer = get_buffer(b, writable=True) + code_units_to_be_read = bytes_to_be_read // 2 + code_units_read = c_ulong() + + rv = ReadConsoleW(self.handle, buffer, code_units_to_be_read, + byref(code_units_read), None) + if GetLastError() == ERROR_OPERATION_ABORTED: + # wait for KeyboardInterrupt + time.sleep(0.1) + if not rv: + raise OSError('Windows error: %s' % GetLastError()) + + if buffer[0] == EOF: + return 0 + return 2 * code_units_read.value + + +class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): + + def writable(self): + return True + + @staticmethod + def _get_error_message(errno): + if errno == ERROR_SUCCESS: + return 'ERROR_SUCCESS' + elif errno == ERROR_NOT_ENOUGH_MEMORY: + return 'ERROR_NOT_ENOUGH_MEMORY' + return 'Windows error %s' % errno + + def write(self, b): + bytes_to_be_written = len(b) + buf = get_buffer(b) + code_units_to_be_written = min(bytes_to_be_written, + MAX_BYTES_WRITTEN) // 2 + code_units_written = c_ulong() + + WriteConsoleW(self.handle, buf, code_units_to_be_written, + byref(code_units_written), None) + bytes_written = 2 * code_units_written.value + + if bytes_written == 0 and bytes_to_be_written > 0: + raise OSError(self._get_error_message(GetLastError())) + return bytes_written + + +class ConsoleStream(object): + + def __init__(self, text_stream, byte_stream): + self._text_stream = text_stream + self.buffer = byte_stream + + @property + def name(self): + return self.buffer.name + + def write(self, x): + if isinstance(x, text_type): + return self._text_stream.write(x) + try: + self.flush() + except Exception: + pass + return self.buffer.write(x) + + def writelines(self, lines): + for line in lines: + self.write(line) + + def __getattr__(self, name): + return getattr(self._text_stream, name) + + def isatty(self): + return self.buffer.isatty() + + def __repr__(self): + return '' % ( + self.name, + self.encoding, + ) + + +class WindowsChunkedWriter(object): + """ + Wraps a stream (such as stdout), acting as a transparent proxy for all + attribute access apart from method 'write()' which we wrap to write in + limited chunks due to a Windows limitation on binary console streams. + """ + def __init__(self, wrapped): + # double-underscore everything to prevent clashes with names of + # attributes on the wrapped stream object. 
+ self.__wrapped = wrapped + + def __getattr__(self, name): + return getattr(self.__wrapped, name) + + def write(self, text): + total_to_write = len(text) + written = 0 + + while written < total_to_write: + to_write = min(total_to_write - written, MAX_BYTES_WRITTEN) + self.__wrapped.write(text[written:written+to_write]) + written += to_write + + +_wrapped_std_streams = set() + + +def _wrap_std_stream(name): + # Python 2 & Windows 7 and below + if PY2 and sys.getwindowsversion()[:2] <= (6, 1) and name not in _wrapped_std_streams: + setattr(sys, name, WindowsChunkedWriter(getattr(sys, name))) + _wrapped_std_streams.add(name) + + +def _get_text_stdin(buffer_stream): + text_stream = _NonClosingTextIOWrapper( + io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + 'utf-16-le', 'strict', line_buffering=True) + return ConsoleStream(text_stream, buffer_stream) + + +def _get_text_stdout(buffer_stream): + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + 'utf-16-le', 'strict', line_buffering=True) + return ConsoleStream(text_stream, buffer_stream) + + +def _get_text_stderr(buffer_stream): + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + 'utf-16-le', 'strict', line_buffering=True) + return ConsoleStream(text_stream, buffer_stream) + + +if PY2: + def _hash_py_argv(): + return zlib.crc32('\x00'.join(sys.argv[1:])) + + _initial_argv_hash = _hash_py_argv() + + def _get_windows_argv(): + argc = c_int(0) + argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc)) + argv = [argv_unicode[i] for i in range(0, argc.value)] + + if not hasattr(sys, 'frozen'): + argv = argv[1:] + while len(argv) > 0: + arg = argv[0] + if not arg.startswith('-') or arg == '-': + break + argv = argv[1:] + if arg.startswith(('-c', '-m')): + break + + return argv[1:] + + +_stream_factories = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _get_windows_console_stream(f, encoding, errors): + if get_buffer is not None and \ + encoding in ('utf-16-le', None) \ + and errors in ('strict', None) and \ + hasattr(f, 'isatty') and f.isatty(): + func = _stream_factories.get(f.fileno()) + if func is not None: + if not PY2: + f = getattr(f, 'buffer', None) + if f is None: + return None + else: + # If we are on Python 2 we need to set the stream that we + # deal with to binary mode as otherwise the exercise if a + # bit moot. The same problems apply as for + # get_binary_stdin and friends from _compat. 
+ msvcrt.setmode(f.fileno(), os.O_BINARY) + return func(f) diff --git a/py3/lib/python3.6/site-packages/click/core.py b/py3/lib/python3.6/site-packages/click/core.py new file mode 100644 index 0000000..7a1e342 --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/core.py @@ -0,0 +1,1856 @@ +import errno +import inspect +import os +import sys +from contextlib import contextmanager +from itertools import repeat +from functools import update_wrapper + +from .types import convert_type, IntRange, BOOL +from .utils import PacifyFlushWrapper, make_str, make_default_short_help, \ + echo, get_os_args +from .exceptions import ClickException, UsageError, BadParameter, Abort, \ + MissingParameter, Exit +from .termui import prompt, confirm, style +from .formatting import HelpFormatter, join_options +from .parser import OptionParser, split_opt +from .globals import push_context, pop_context + +from ._compat import PY2, isidentifier, iteritems, string_types +from ._unicodefun import _check_for_unicode_literals, _verify_python3_env + + +_missing = object() + + +SUBCOMMAND_METAVAR = 'COMMAND [ARGS]...' +SUBCOMMANDS_METAVAR = 'COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]...' + +DEPRECATED_HELP_NOTICE = ' (DEPRECATED)' +DEPRECATED_INVOKE_NOTICE = 'DeprecationWarning: ' + \ + 'The command %(name)s is deprecated.' + + +def _maybe_show_deprecated_notice(cmd): + if cmd.deprecated: + echo(style(DEPRECATED_INVOKE_NOTICE % {'name': cmd.name}, fg='red'), err=True) + + +def fast_exit(code): + """Exit without garbage collection, this speeds up exit by about 10ms for + things like bash completion. + """ + sys.stdout.flush() + sys.stderr.flush() + os._exit(code) + + +def _bashcomplete(cmd, prog_name, complete_var=None): + """Internal handler for the bash completion support.""" + if complete_var is None: + complete_var = '_%s_COMPLETE' % (prog_name.replace('-', '_')).upper() + complete_instr = os.environ.get(complete_var) + if not complete_instr: + return + + from ._bashcomplete import bashcomplete + if bashcomplete(cmd, prog_name, complete_var, complete_instr): + fast_exit(1) + + +def _check_multicommand(base_command, cmd_name, cmd, register=False): + if not base_command.chain or not isinstance(cmd, MultiCommand): + return + if register: + hint = 'It is not possible to add multi commands as children to ' \ + 'another multi command that is in chain mode' + else: + hint = 'Found a multi command as subcommand to a multi command ' \ + 'that is in chain mode. This is not supported' + raise RuntimeError('%s. Command "%s" is set to chain and "%s" was ' + 'added as subcommand but it in itself is a ' + 'multi command. ("%s" is a %s within a chained ' + '%s named "%s").' % ( + hint, base_command.name, cmd_name, + cmd_name, cmd.__class__.__name__, + base_command.__class__.__name__, + base_command.name)) + + +def batch(iterable, batch_size): + return list(zip(*repeat(iter(iterable), batch_size))) + + +def invoke_param_callback(callback, ctx, param, value): + code = getattr(callback, '__code__', None) + args = getattr(code, 'co_argcount', 3) + + if args < 3: + # This will become a warning in Click 3.0: + from warnings import warn + warn(Warning('Invoked legacy parameter callback "%s". The new ' + 'signature for such callbacks starting with ' + 'click 2.0 is (ctx, param, value).' + % callback), stacklevel=3) + return callback(ctx, value) + return callback(ctx, param, value) + + +@contextmanager +def augment_usage_errors(ctx, param=None): + """Context manager that attaches extra information to exceptions that + fly. 
+ """ + try: + yield + except BadParameter as e: + if e.ctx is None: + e.ctx = ctx + if param is not None and e.param is None: + e.param = param + raise + except UsageError as e: + if e.ctx is None: + e.ctx = ctx + raise + + +def iter_params_for_processing(invocation_order, declaration_order): + """Given a sequence of parameters in the order as should be considered + for processing and an iterable of parameters that exist, this returns + a list in the correct order as they should be processed. + """ + def sort_key(item): + try: + idx = invocation_order.index(item) + except ValueError: + idx = float('inf') + return (not item.is_eager, idx) + + return sorted(declaration_order, key=sort_key) + + +class Context(object): + """The context is a special internal object that holds state relevant + for the script execution at every single level. It's normally invisible + to commands unless they opt-in to getting access to it. + + The context is useful as it can pass internal objects around and can + control special execution features such as reading data from + environment variables. + + A context can be used as context manager in which case it will call + :meth:`close` on teardown. + + .. versionadded:: 2.0 + Added the `resilient_parsing`, `help_option_names`, + `token_normalize_func` parameters. + + .. versionadded:: 3.0 + Added the `allow_extra_args` and `allow_interspersed_args` + parameters. + + .. versionadded:: 4.0 + Added the `color`, `ignore_unknown_options`, and + `max_content_width` parameters. + + :param command: the command class for this context. + :param parent: the parent context. + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it is usually + the name of the script, for commands below it it's + the name of the script. + :param obj: an arbitrary object of user data. + :param auto_envvar_prefix: the prefix to use for automatic environment + variables. If this is `None` then reading + from environment variables is disabled. This + does not affect manually set environment + variables which are always read. + :param default_map: a dictionary (like object) with default values + for parameters. + :param terminal_width: the width of the terminal. The default is + inherit from parent context. If no context + defines the terminal width then auto + detection will be applied. + :param max_content_width: the maximum width for content rendered by + Click (this currently only affects help + pages). This defaults to 80 characters if + not overridden. In other words: even if the + terminal is larger than that, Click will not + format things wider than 80 characters by + default. In addition to that, formatters might + add some safety mapping on the right. + :param resilient_parsing: if this flag is enabled then Click will + parse without any interactivity or callback + invocation. Default values will also be + ignored. This is useful for implementing + things such as completion support. + :param allow_extra_args: if this is set to `True` then extra arguments + at the end will not raise an error and will be + kept on the context. The default is to inherit + from the command. + :param allow_interspersed_args: if this is set to `False` then options + and arguments cannot be mixed. The + default is to inherit from the command. + :param ignore_unknown_options: instructs click to ignore options it does + not know and keeps them for later + processing. 
+ :param help_option_names: optionally a list of strings that define how + the default help parameter is named. The + default is ``['--help']``. + :param token_normalize_func: an optional function that is used to + normalize tokens (options, choices, + etc.). This for instance can be used to + implement case insensitive behavior. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are used in texts that Click prints which is by + default not the case. This for instance would affect + help output. + """ + + def __init__(self, command, parent=None, info_name=None, obj=None, + auto_envvar_prefix=None, default_map=None, + terminal_width=None, max_content_width=None, + resilient_parsing=False, allow_extra_args=None, + allow_interspersed_args=None, + ignore_unknown_options=None, help_option_names=None, + token_normalize_func=None, color=None): + #: the parent context or `None` if none exists. + self.parent = parent + #: the :class:`Command` for this context. + self.command = command + #: the descriptive information name + self.info_name = info_name + #: the parsed parameters except if the value is hidden in which + #: case it's not remembered. + self.params = {} + #: the leftover arguments. + self.args = [] + #: protected arguments. These are arguments that are prepended + #: to `args` when certain parsing scenarios are encountered but + #: must be never propagated to another arguments. This is used + #: to implement nested parsing. + self.protected_args = [] + if obj is None and parent is not None: + obj = parent.obj + #: the user object stored. + self.obj = obj + self._meta = getattr(parent, 'meta', {}) + + #: A dictionary (-like object) with defaults for parameters. + if default_map is None \ + and parent is not None \ + and parent.default_map is not None: + default_map = parent.default_map.get(info_name) + self.default_map = default_map + + #: This flag indicates if a subcommand is going to be executed. A + #: group callback can use this information to figure out if it's + #: being executed directly or because the execution flow passes + #: onwards to a subcommand. By default it's None, but it can be + #: the name of the subcommand to execute. + #: + #: If chaining is enabled this will be set to ``'*'`` in case + #: any commands are executed. It is however not possible to + #: figure out which ones. If you require this knowledge you + #: should use a :func:`resultcallback`. + self.invoked_subcommand = None + + if terminal_width is None and parent is not None: + terminal_width = parent.terminal_width + #: The width of the terminal (None is autodetection). + self.terminal_width = terminal_width + + if max_content_width is None and parent is not None: + max_content_width = parent.max_content_width + #: The maximum width of formatted content (None implies a sensible + #: default which is 80 for most things). + self.max_content_width = max_content_width + + if allow_extra_args is None: + allow_extra_args = command.allow_extra_args + #: Indicates if the context allows extra args or if it should + #: fail on parsing. + #: + #: .. versionadded:: 3.0 + self.allow_extra_args = allow_extra_args + + if allow_interspersed_args is None: + allow_interspersed_args = command.allow_interspersed_args + #: Indicates if the context allows mixing of arguments and + #: options or not. + #: + #: .. 
versionadded:: 3.0 + self.allow_interspersed_args = allow_interspersed_args + + if ignore_unknown_options is None: + ignore_unknown_options = command.ignore_unknown_options + #: Instructs click to ignore options that a command does not + #: understand and will store it on the context for later + #: processing. This is primarily useful for situations where you + #: want to call into external programs. Generally this pattern is + #: strongly discouraged because it's not possibly to losslessly + #: forward all arguments. + #: + #: .. versionadded:: 4.0 + self.ignore_unknown_options = ignore_unknown_options + + if help_option_names is None: + if parent is not None: + help_option_names = parent.help_option_names + else: + help_option_names = ['--help'] + + #: The names for the help options. + self.help_option_names = help_option_names + + if token_normalize_func is None and parent is not None: + token_normalize_func = parent.token_normalize_func + + #: An optional normalization function for tokens. This is + #: options, choices, commands etc. + self.token_normalize_func = token_normalize_func + + #: Indicates if resilient parsing is enabled. In that case Click + #: will do its best to not cause any failures and default values + #: will be ignored. Useful for completion. + self.resilient_parsing = resilient_parsing + + # If there is no envvar prefix yet, but the parent has one and + # the command on this level has a name, we can expand the envvar + # prefix automatically. + if auto_envvar_prefix is None: + if parent is not None \ + and parent.auto_envvar_prefix is not None and \ + self.info_name is not None: + auto_envvar_prefix = '%s_%s' % (parent.auto_envvar_prefix, + self.info_name.upper()) + else: + auto_envvar_prefix = auto_envvar_prefix.upper() + self.auto_envvar_prefix = auto_envvar_prefix + + if color is None and parent is not None: + color = parent.color + + #: Controls if styling output is wanted or not. + self.color = color + + self._close_callbacks = [] + self._depth = 0 + + def __enter__(self): + self._depth += 1 + push_context(self) + return self + + def __exit__(self, exc_type, exc_value, tb): + self._depth -= 1 + if self._depth == 0: + self.close() + pop_context() + + @contextmanager + def scope(self, cleanup=True): + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. + """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self): + """This is a dictionary which is shared with all the contexts + that are nested. It exists so that click utilities can store some + state here if they need to. 
It is however the responsibility of + that code to manage this dictionary well. + + The keys are supposed to be unique dotted strings. For instance + module paths are a good choice for it. What is stored in there is + irrelevant for the operation of click. However what is important is + that code that places data here adheres to the general semantics of + the system. + + Example usage:: + + LANG_KEY = __name__ + '.lang' + + def set_language(value): + ctx = get_current_context() + ctx.meta[LANG_KEY] = value + + def get_language(): + return get_current_context().meta.get(LANG_KEY, 'en_US') + + .. versionadded:: 5.0 + """ + return self._meta + + def make_formatter(self): + """Creates the formatter for the help and usage output.""" + return HelpFormatter(width=self.terminal_width, + max_width=self.max_content_width) + + def call_on_close(self, f): + """This decorator remembers a function as callback that should be + executed when the context tears down. This is most useful to bind + resource handling to the script execution. For instance, file objects + opened by the :class:`File` type will register their close callbacks + here. + + :param f: the function to execute on teardown. + """ + self._close_callbacks.append(f) + return f + + def close(self): + """Invokes all close callbacks.""" + for cb in self._close_callbacks: + cb() + self._close_callbacks = [] + + @property + def command_path(self): + """The computed command path. This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. + """ + rv = '' + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + rv = self.parent.command_path + ' ' + rv + return rv.lstrip() + + def find_root(self): + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type): + """Finds the closest object of a given type.""" + node = self + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + node = node.parent + + def ensure_object(self, object_type): + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + def lookup_default(self, name): + """Looks up the default for a parameter name. This by default + looks into the :attr:`default_map` if available. + """ + if self.default_map is not None: + rv = self.default_map.get(name) + if callable(rv): + rv = rv() + return rv + + def fail(self, message): + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self): + """Aborts the script.""" + raise Abort() + + def exit(self, code=0): + """Exits the application with a given exit code.""" + raise Exit(code) + + def get_usage(self): + """Helper method to get formatted usage string for the current + context and command. + """ + return self.command.get_usage(self) + + def get_help(self): + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def invoke(*args, **kwargs): + """Invokes a command callback in exactly the way it expects. There + are two ways to invoke this method: + + 1. 
the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. the first argument is a click command object. In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. + + Note that before Click 3.2 keyword arguments were not properly filled + in against the intention of this code and no context was created. For + more information about this change and why it was done in a bugfix + release see :ref:`upgrade-to-3.2`. + """ + self, callback = args[:2] + ctx = self + + # It's also possible to invoke another command which might or + # might not have a callback. In that case we also fill + # in defaults and make a new context for this command. + if isinstance(callback, Command): + other_cmd = callback + callback = other_cmd.callback + ctx = Context(other_cmd, info_name=other_cmd.name, parent=self) + if callback is None: + raise TypeError('The given command does not have a ' + 'callback that can be invoked.') + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + kwargs[param.name] = param.get_default(ctx) + + args = args[2:] + with augment_usage_errors(self): + with ctx: + return callback(*args, **kwargs) + + def forward(*args, **kwargs): + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. + """ + self, cmd = args[:2] + + # It's also possible to invoke another command which might or + # might not have a callback. + if not isinstance(cmd, Command): + raise TypeError('Callback is not a command.') + + for param in self.params: + if param not in kwargs: + kwargs[param] = self.params[param] + + return self.invoke(cmd, **kwargs) + + +class BaseCommand(object): + """The base command implements the minimal API contract of commands. + Most code will never use this as it does not implement a lot of useful + functionality but it can act as the direct subclass of alternative + parsing methods that do not depend on the Click parser. + + For instance, this can be used to bridge Click and other systems like + argparse or docopt. + + Because base commands do not implement a lot of the API that other + parts of Click take for granted, they are not supported for all + operations. For instance, they cannot be used with the decorators + usually and they have no built-in callback system. + + .. versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + """ + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__(self, name, context_settings=None): + #: the name the command thinks it has. Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. 
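The invoke and forward helpers documented above are easiest to see in a small script; the sketch below (command and option names are invented for illustration and are not part of this repository) shows both calling styles from inside another command:

    import click

    @click.group()
    def cli():
        pass

    @cli.command()
    @click.option('--count', default=1, help='Number of greetings.')
    def hello(count):
        for _ in range(count):
            click.echo('Hello!')

    @cli.command()
    @click.option('--count', default=1)
    @click.pass_context
    def repeat(ctx, count):
        ctx.forward(hello)            # reuse this command's parsed --count value
        ctx.invoke(hello, count=3)    # or pass click parameters explicitly

    if __name__ == '__main__':
        cli()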
+ self.name = name + if context_settings is None: + context_settings = {} + #: an optional dictionary with defaults passed to the context. + self.context_settings = context_settings + + def get_usage(self, ctx): + raise NotImplementedError('Base commands cannot get usage') + + def get_help(self, ctx): + raise NotImplementedError('Base commands cannot get help') + + def make_context(self, info_name, args, parent=None, **extra): + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. + + :param info_name: the info name for this invokation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it it's + the name of the script. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + """ + for key, value in iteritems(self.context_settings): + if key not in extra: + extra[key] = value + ctx = Context(self, info_name=info_name, parent=parent, **extra) + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx, args): + """Given a context and a list of arguments this creates the parser + and parses the arguments, then modifies the context as necessary. + This is automatically invoked by :meth:`make_context`. + """ + raise NotImplementedError('Base commands do not know how to parse ' + 'arguments.') + + def invoke(self, ctx): + """Given a context, this invokes the command. The default + implementation is raising a not implemented error. + """ + raise NotImplementedError('Base commands are not invokable by default') + + def main(self, args=None, prog_name=None, complete_var=None, + standalone_mode=True, **extra): + """This is the way to invoke a script with all the bells and + whistles as a command line application. This will always terminate + the application after a call. If this is not wanted, ``SystemExit`` + needs to be caught. + + This method is also available by directly calling the instance of + a :class:`Command`. + + .. versionadded:: 3.0 + Added the `standalone_mode` flag to control the standalone mode. + + :param args: the arguments that should be used for parsing. If not + provided, ``sys.argv[1:]`` is used. + :param prog_name: the program name that should be used. By default + the program name is constructed by taking the file + name from ``sys.argv[0]``. + :param complete_var: the environment variable that controls the + bash completion support. The default is + ``"__COMPLETE"`` with prog_name in + uppercase. + :param standalone_mode: the default behavior is to invoke the script + in standalone mode. Click will then + handle exceptions and convert them into + error messages and the function will never + return but shut down the interpreter. If + this is set to `False` they will be + propagated to the caller and the return + value of this function is the return value + of :meth:`invoke`. + :param extra: extra keyword arguments are forwarded to the context + constructor. See :class:`Context` for more information. + """ + # If we are in Python 3, we will verify that the environment is + # sane at this point or reject further execution to avoid a + # broken script. 
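As the main() docstring above notes, standalone mode normally terminates the interpreter; with standalone_mode=False the callback's return value is handed back to the caller, which is convenient in tests. A minimal sketch (the command and its option are hypothetical):

    import click

    @click.command()
    @click.option('--name', default='world')
    def greet(name):
        """Echo a greeting and return the name that was used."""
        click.echo('Hello %s!' % name)
        return name

    # Standalone mode would call sys.exit(); disabling it propagates
    # exceptions and returns the callback's return value to the caller.
    result = greet.main(['--name', 'flo'], standalone_mode=False)
    assert result == 'flo'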
+ if not PY2: + _verify_python3_env() + else: + _check_for_unicode_literals() + + if args is None: + args = get_os_args() + else: + args = list(args) + + if prog_name is None: + prog_name = make_str(os.path.basename( + sys.argv and sys.argv[0] or __file__)) + + # Hook for the Bash completion. This only activates if the Bash + # completion is actually enabled, otherwise this is quite a fast + # noop. + _bashcomplete(self, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! + # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt): + echo(file=sys.stderr) + raise Abort() + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except IOError as e: + if e.errno == errno.EPIPE: + sys.stdout = PacifyFlushWrapper(sys.stdout) + sys.stderr = PacifyFlushWrapper(sys.stderr) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo('Aborted!', file=sys.stderr) + sys.exit(1) + + def __call__(self, *args, **kwargs): + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class Command(BaseCommand): + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + .. versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param hidden: hide this command from help outputs. + + :param deprecated: issues a message indicating that + the command is deprecated. 
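For reference, the Command attributes listed above (short_help, epilog, deprecated, plus the form-feed truncation of long help handled in __init__ just below) look like this in use; the command name and strings are purely illustrative:

    import click

    @click.command(short_help='Rebuild the index.',
                   epilog='Run with --help for the full option list.',
                   deprecated=True)
    @click.argument('db_path')
    def rebuild(db_path):
        """Rebuild the token index stored at DB_PATH.

        \f
        Everything after the form feed is dropped from --help output.
        """
        click.echo('Rebuilding %s' % db_path)

    if __name__ == '__main__':
        rebuild()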
+ """ + + def __init__(self, name, context_settings=None, callback=None, + params=None, help=None, epilog=None, short_help=None, + options_metavar='[OPTIONS]', add_help_option=True, + hidden=False, deprecated=False): + BaseCommand.__init__(self, name, context_settings) + #: the callback to execute when the command fires. This might be + #: `None` in which case nothing happens. + self.callback = callback + #: the list of parameters for this command in the order they + #: should show up in the help page and execute. Eager parameters + #: will automatically be handled before non eager ones. + self.params = params or [] + # if a form feed (page break) is found in the help text, truncate help + # text to the content preceding the first form feed + if help and '\f' in help: + help = help.split('\f', 1)[0] + self.help = help + self.epilog = epilog + self.options_metavar = options_metavar + self.short_help = short_help + self.add_help_option = add_help_option + self.hidden = hidden + self.deprecated = deprecated + + def get_usage(self, ctx): + formatter = ctx.make_formatter() + self.format_usage(ctx, formatter) + return formatter.getvalue().rstrip('\n') + + def get_params(self, ctx): + rv = self.params + help_option = self.get_help_option(ctx) + if help_option is not None: + rv = rv + [help_option] + return rv + + def format_usage(self, ctx, formatter): + """Writes the usage line into the formatter.""" + pieces = self.collect_usage_pieces(ctx) + formatter.write_usage(ctx.command_path, ' '.join(pieces)) + + def collect_usage_pieces(self, ctx): + """Returns all the pieces that go into the usage line and returns + it as a list of strings. + """ + rv = [self.options_metavar] + for param in self.get_params(ctx): + rv.extend(param.get_usage_pieces(ctx)) + return rv + + def get_help_option_names(self, ctx): + """Returns the names for the help option.""" + all_names = set(ctx.help_option_names) + for param in self.params: + all_names.difference_update(param.opts) + all_names.difference_update(param.secondary_opts) + return all_names + + def get_help_option(self, ctx): + """Returns the help option object.""" + help_options = self.get_help_option_names(ctx) + if not help_options or not self.add_help_option: + return + + def show_help(ctx, param, value): + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + return Option(help_options, is_flag=True, + is_eager=True, expose_value=False, + callback=show_help, + help='Show this message and exit.') + + def make_parser(self, ctx): + """Creates the underlying option parser for this command.""" + parser = OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx): + """Formats the help into a string and returns it. This creates a + formatter and will call into the following formatting methods: + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip('\n') + + def get_short_help_str(self, limit=45): + """Gets short help for the command or makes it by shortening the long help string.""" + return self.short_help or self.help and make_default_short_help(self.help, limit) or '' + + def format_help(self, ctx, formatter): + """Writes the help into the formatter if it exists. 
+ + This calls into the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx, formatter): + """Writes the help text to the formatter if it exists.""" + if self.help: + formatter.write_paragraph() + with formatter.indentation(): + help_text = self.help + if self.deprecated: + help_text += DEPRECATED_HELP_NOTICE + formatter.write_text(help_text) + elif self.deprecated: + formatter.write_paragraph() + with formatter.indentation(): + formatter.write_text(DEPRECATED_HELP_NOTICE) + + def format_options(self, ctx, formatter): + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section('Options'): + formatter.write_dl(opts) + + def format_epilog(self, ctx, formatter): + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + formatter.write_paragraph() + with formatter.indentation(): + formatter.write_text(self.epilog) + + def parse_args(self, ctx, args): + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing( + param_order, self.get_params(ctx)): + value, args = param.handle_parse_result(ctx, opts, args) + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail('Got unexpected extra argument%s (%s)' + % (len(args) != 1 and 's' or '', + ' '.join(map(make_str, args)))) + + ctx.args = args + return args + + def invoke(self, ctx): + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + _maybe_show_deprecated_notice(self) + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + +class MultiCommand(Command): + """A multi command is the basic implementation of a command that + dispatches to subcommands. The most common version is the + :class:`Group`. + + :param invoke_without_command: this controls how the multi command itself + is invoked. By default it's only invoked + if a subcommand is provided. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is enabled by default if + `invoke_without_command` is disabled or disabled + if it's enabled. If enabled this will add + ``--help`` as argument if no arguments are + passed. + :param subcommand_metavar: the string that is used in the documentation + to indicate the subcommand place. + :param chain: if this is set to `True` chaining of multiple subcommands + is enabled. This restricts the form of commands in that + they cannot have optional arguments but it allows + multiple commands to be chained together. + :param result_callback: the result callback to attach to this multi + command. 
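The chain and result_callback parameters above combine as follows: in chain mode the result callback receives the list of return values of every subcommand that ran. A sketch with made-up step names:

    import click

    @click.group(chain=True)
    def pipeline():
        """Run several steps in one invocation, e.g. `pipeline validate export`."""

    @pipeline.command()
    def validate():
        click.echo('validating')
        return 'validate'

    @pipeline.command()
    def export():
        click.echo('exporting')
        return 'export'

    @pipeline.resultcallback()
    def report(results):
        # Chain mode: `results` is the list of all subcommand return values.
        click.echo('steps run: %s' % ', '.join(results))

    if __name__ == '__main__':
        pipeline()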
+ """ + allow_extra_args = True + allow_interspersed_args = False + + def __init__(self, name=None, invoke_without_command=False, + no_args_is_help=None, subcommand_metavar=None, + chain=False, result_callback=None, **attrs): + Command.__init__(self, name, **attrs) + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + if subcommand_metavar is None: + if chain: + subcommand_metavar = SUBCOMMANDS_METAVAR + else: + subcommand_metavar = SUBCOMMAND_METAVAR + self.subcommand_metavar = subcommand_metavar + self.chain = chain + #: The result callback that is stored. This can be set or + #: overridden with the :func:`resultcallback` decorator. + self.result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError('Multi commands in chain mode cannot ' + 'have optional arguments.') + + def collect_usage_pieces(self, ctx): + rv = Command.collect_usage_pieces(self, ctx) + rv.append(self.subcommand_metavar) + return rv + + def format_options(self, ctx, formatter): + Command.format_options(self, ctx, formatter) + self.format_commands(ctx, formatter) + + def resultcallback(self, replace=False): + """Adds a result callback to the chain command. By default if a + result callback is already registered this will chain them but + this can be disabled with the `replace` parameter. The result + callback is invoked with the return value of the subcommand + (or the list of return values from all subcommands if chaining + is enabled) as well as the parameters as they would be passed + to the main callback. + + Example:: + + @click.group() + @click.option('-i', '--input', default=23) + def cli(input): + return 42 + + @cli.resultcallback() + def process_result(result, input): + return result + input + + .. versionadded:: 3.0 + + :param replace: if set to `True` an already existing result + callback will be removed. + """ + def decorator(f): + old_callback = self.result_callback + if old_callback is None or replace: + self.result_callback = f + return f + def function(__value, *args, **kwargs): + return f(old_callback(__value, *args, **kwargs), + *args, **kwargs) + self.result_callback = rv = update_wrapper(function, f) + return rv + return decorator + + def format_commands(self, ctx, formatter): + """Extra format methods for multi methods that adds all the commands + after the options. + """ + commands = [] + for subcommand in self.list_commands(ctx): + cmd = self.get_command(ctx, subcommand) + # What is this, the tool lied about a command. 
Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section('Commands'): + formatter.write_dl(rows) + + def parse_args(self, ctx, args): + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + rest = Command.parse_args(self, ctx, args) + if self.chain: + ctx.protected_args = rest + ctx.args = [] + elif rest: + ctx.protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx): + def _process_result(value): + if self.result_callback is not None: + value = ctx.invoke(self.result_callback, value, + **ctx.params) + return value + + if not ctx.protected_args: + # If we are invoked without command the chain flag controls + # how this happens. If we are not in chain mode, the return + # value here is the return value of the command. + # If however we are in chain mode, the return value is the + # return value of the result processor invoked with an empty + # list (which means that no subcommand actually was executed). + if self.invoke_without_command: + if not self.chain: + return Command.invoke(self, ctx) + with ctx: + Command.invoke(self, ctx) + return _process_result([]) + ctx.fail('Missing command.') + + # Fetch args back out + args = ctx.protected_args + ctx.args + ctx.args = [] + ctx.protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. + with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + ctx.invoked_subcommand = cmd_name + Command.invoke(self, ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = args and '*' or None + Command.invoke(self, ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command(self, ctx, args): + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. 
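resolve_command above ultimately defers to get_command and list_commands, which subclasses supply. A common pattern is a MultiCommand that discovers its subcommands at runtime; the plugin directory and module layout below are hypothetical:

    import os
    import click

    PLUGIN_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'commands')

    class PluginCLI(click.MultiCommand):
        """One subcommand per *.py file in PLUGIN_DIR."""

        def list_commands(self, ctx):
            return sorted(f[:-3] for f in os.listdir(PLUGIN_DIR) if f.endswith('.py'))

        def get_command(self, ctx, cmd_name):
            path = os.path.join(PLUGIN_DIR, cmd_name + '.py')
            if not os.path.isfile(path):
                return None                 # Click then reports "No such command"
            ns = {}
            with open(path) as fh:
                exec(compile(fh.read(), path, 'exec'), ns)
            return ns.get('cli')            # each plugin module defines a `cli` Command

    cli = PluginCLI(help='Subcommands are discovered from PLUGIN_DIR at runtime.')

    if __name__ == '__main__':
        cli()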
+ if cmd is None and ctx.token_normalize_func is not None: + cmd_name = ctx.token_normalize_func(cmd_name) + cmd = self.get_command(ctx, cmd_name) + + # If we don't find the command we want to show an error message + # to the user that it was not provided. However, there is + # something else we should do: if the first argument looks like + # an option we want to kick off parsing again for arguments to + # resolve things like --help which now should go to the main + # place. + if cmd is None and not ctx.resilient_parsing: + if split_opt(cmd_name)[0]: + self.parse_args(ctx, ctx.args) + ctx.fail('No such command "%s".' % original_cmd_name) + + return cmd_name, cmd, args[1:] + + def get_command(self, ctx, cmd_name): + """Given a context and a command name, this returns a + :class:`Command` object if it exists or returns `None`. + """ + raise NotImplementedError() + + def list_commands(self, ctx): + """Returns a list of subcommand names in the order they should + appear. + """ + return [] + + +class Group(MultiCommand): + """A group allows a command to have subcommands attached. This is the + most common way to implement nesting in Click. + + :param commands: a dictionary of commands. + """ + + def __init__(self, name=None, commands=None, **attrs): + MultiCommand.__init__(self, name, **attrs) + #: the registered subcommands by their exported names. + self.commands = commands or {} + + def add_command(self, cmd, name=None): + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError('Command has no name.') + _check_multicommand(self, name, cmd, register=True) + self.commands[name] = cmd + + def command(self, *args, **kwargs): + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` but + immediately registers the created command with this instance by + calling into :meth:`add_command`. + """ + def decorator(f): + cmd = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + return decorator + + def group(self, *args, **kwargs): + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` but + immediately registers the created command with this instance by + calling into :meth:`add_command`. + """ + def decorator(f): + cmd = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + return decorator + + def get_command(self, ctx, cmd_name): + return self.commands.get(cmd_name) + + def list_commands(self, ctx): + return sorted(self.commands) + + +class CommandCollection(MultiCommand): + """A command collection is a multi command that merges multiple multi + commands together into one. This is a straightforward implementation + that accepts a list of different multi commands as sources and + provides all the commands for each of them. + """ + + def __init__(self, name=None, sources=None, **attrs): + MultiCommand.__init__(self, name, **attrs) + #: The list of registered multi commands. 
+ self.sources = sources or [] + + def add_source(self, multi_cmd): + """Adds a new multi command to the chain dispatcher.""" + self.sources.append(multi_cmd) + + def get_command(self, ctx, cmd_name): + for source in self.sources: + rv = source.get_command(ctx, cmd_name) + if rv is not None: + if self.chain: + _check_multicommand(self, cmd_name, rv) + return rv + + def list_commands(self, ctx): + rv = set() + for source in self.sources: + rv.update(source.list_commands(ctx)) + return sorted(rv) + + +class Parameter(object): + r"""A parameter to a command comes in two versions: they are either + :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently + not supported by design as some of the internals for parsing are + intentionally not finalized. + + Some settings are supported by both options and arguments. + + .. versionchanged:: 2.0 + Changed signature for parameter callback to also be passed the + parameter. In Click 2.0, the old callback format will still work, + but it will raise a warning to give you change to migrate the + code easier. + + :param param_decls: the parameter declarations for this option or + argument. This is a list of flags or argument + names. + :param type: the type that should be used. Either a :class:`ParamType` + or a Python type. The later is converted into the former + automatically if supported. + :param required: controls if this is optional or not. + :param default: the default value if omitted. This can also be a callable, + in which case it's invoked when the default is needed + without any arguments. + :param callback: a callback that should be executed after the parameter + was matched. This is called as ``fn(ctx, param, + value)`` and needs to return the value. Before Click + 2.0, the signature was ``(ctx, value)``. + :param nargs: the number of arguments to match. If not ``1`` the return + value is a tuple instead of single value. The default for + nargs is ``1`` (except if the type is a tuple, then it's + the arity of the tuple). + :param metavar: how the value is represented in the help page. + :param expose_value: if this is `True` then the value is passed onwards + to the command callback and stored on the context, + otherwise it's skipped. + :param is_eager: eager values are processed before non eager ones. This + should not be set for arguments or it will inverse the + order of processing. + :param envvar: a string or list of strings that are environment variables + that should be checked. + """ + param_type_name = 'parameter' + + def __init__(self, param_decls=None, type=None, required=False, + default=None, callback=None, nargs=None, metavar=None, + expose_value=True, is_eager=False, envvar=None, + autocompletion=None): + self.name, self.opts, self.secondary_opts = \ + self._parse_decls(param_decls or (), expose_value) + + self.type = convert_type(type, default) + + # Default nargs to what the type tells us if we have that + # information available. + if nargs is None: + if self.type.is_composite: + nargs = self.type.arity + else: + nargs = 1 + + self.required = required + self.callback = callback + self.nargs = nargs + self.multiple = False + self.expose_value = expose_value + self.default = default + self.is_eager = is_eager + self.metavar = metavar + self.envvar = envvar + self.autocompletion = autocompletion + + @property + def human_readable_name(self): + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. 
+ """ + return self.name + + def make_metavar(self): + if self.metavar is not None: + return self.metavar + metavar = self.type.get_metavar(self) + if metavar is None: + metavar = self.type.name.upper() + if self.nargs != 1: + metavar += '...' + return metavar + + def get_default(self, ctx): + """Given a context variable this calculates the default value.""" + # Otherwise go with the regular default. + if callable(self.default): + rv = self.default() + else: + rv = self.default + return self.type_cast_value(ctx, rv) + + def add_to_parser(self, parser, ctx): + pass + + def consume_value(self, ctx, opts): + value = opts.get(self.name) + if value is None: + value = self.value_from_envvar(ctx) + if value is None: + value = ctx.lookup_default(self.name) + return value + + def type_cast_value(self, ctx, value): + """Given a value this runs it properly through the type system. + This automatically handles things like `nargs` and `multiple` as + well as composite types. + """ + if self.type.is_composite: + if self.nargs <= 1: + raise TypeError('Attempted to invoke composite type ' + 'but nargs has been set to %s. This is ' + 'not supported; nargs needs to be set to ' + 'a fixed value > 1.' % self.nargs) + if self.multiple: + return tuple(self.type(x or (), self, ctx) for x in value or ()) + return self.type(value or (), self, ctx) + + def _convert(value, level): + if level == 0: + return self.type(value, self, ctx) + return tuple(_convert(x, level - 1) for x in value or ()) + return _convert(value, (self.nargs != 1) + bool(self.multiple)) + + def process_value(self, ctx, value): + """Given a value and context this runs the logic to convert the + value as necessary. + """ + # If the value we were given is None we do nothing. This way + # code that calls this can easily figure out if something was + # not provided. Otherwise it would be converted into an empty + # tuple for multiple invocations which is inconvenient. 
+ if value is not None: + return self.type_cast_value(ctx, value) + + def value_is_missing(self, value): + if value is None: + return True + if (self.nargs != 1 or self.multiple) and value == (): + return True + return False + + def full_process_value(self, ctx, value): + value = self.process_value(ctx, value) + + if value is None and not ctx.resilient_parsing: + value = self.get_default(ctx) + + if self.required and self.value_is_missing(value): + raise MissingParameter(ctx=ctx, param=self) + + return value + + def resolve_envvar_value(self, ctx): + if self.envvar is None: + return + if isinstance(self.envvar, (tuple, list)): + for envvar in self.envvar: + rv = os.environ.get(envvar) + if rv is not None: + return rv + else: + return os.environ.get(self.envvar) + + def value_from_envvar(self, ctx): + rv = self.resolve_envvar_value(ctx) + if rv is not None and self.nargs != 1: + rv = self.type.split_envvar_value(rv) + return rv + + def handle_parse_result(self, ctx, opts, args): + with augment_usage_errors(ctx, param=self): + value = self.consume_value(ctx, opts) + try: + value = self.full_process_value(ctx, value) + except Exception: + if not ctx.resilient_parsing: + raise + value = None + if self.callback is not None: + try: + value = invoke_param_callback( + self.callback, ctx, self, value) + except Exception: + if not ctx.resilient_parsing: + raise + + if self.expose_value: + ctx.params[self.name] = value + return value, args + + def get_help_record(self, ctx): + pass + + def get_usage_pieces(self, ctx): + return [] + + def get_error_hint(self, ctx): + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return ' / '.join('"%s"' % x for x in hint_list) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: controls if the default value should be shown on the + help page. Normally, defaults are not shown. If this + value is a string, it shows the string instead of the + value. This is particularly useful for dynamic options. + :param show_envvar: controls if an environment variable should be shown on + the help page. Normally, environment variables + are not shown. + :param prompt: if set to `True` or a non empty string then the user will be + prompted for input. If set to `True` the prompt will be the + option name capitalized. + :param confirmation_prompt: if set then the value will need to be confirmed + if it was prompted for. + :param hide_input: if this is `True` then the input on the prompt will be + hidden from the user. This is useful for password + input. + :param is_flag: forces this option to act as a flag. The default is + auto detection. + :param flag_value: which value should be used for this flag if it's + enabled. This is set to a boolean automatically if + the option string contains a slash to mark two options. + :param multiple: if this is set to `True` then the argument is accepted + multiple times and recorded. This is similar to ``nargs`` + in how it works but supports arbitrary number of + arguments. + :param count: this flag makes an option increment an integer. + :param allow_from_autoenv: if this is enabled then the value of this + parameter will be pulled from an environment + variable in case a prefix is defined on the + context. 
+ :param help: the help string. + :param hidden: hide this option from help outputs. + """ + param_type_name = 'option' + + def __init__(self, param_decls=None, show_default=False, + prompt=False, confirmation_prompt=False, + hide_input=False, is_flag=None, flag_value=None, + multiple=False, count=False, allow_from_autoenv=True, + type=None, help=None, hidden=False, show_choices=True, + show_envvar=False, **attrs): + default_is_missing = attrs.get('default', _missing) is _missing + Parameter.__init__(self, param_decls, type=type, **attrs) + + if prompt is True: + prompt_text = self.name.replace('_', ' ').capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = prompt + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.hide_input = hide_input + self.hidden = hidden + + # Flags + if is_flag is None: + if flag_value is not None: + is_flag = True + else: + is_flag = bool(self.secondary_opts) + if is_flag and default_is_missing: + self.default = False + if flag_value is None: + flag_value = not self.default + self.is_flag = is_flag + self.flag_value = flag_value + if self.is_flag and isinstance(self.flag_value, bool) \ + and type is None: + self.type = BOOL + self.is_bool_flag = True + else: + self.is_bool_flag = False + + # Counting + self.count = count + if count: + if type is None: + self.type = IntRange(min=0) + if default_is_missing: + self.default = 0 + + self.multiple = multiple + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + # Sanity check for stuff we don't support + if __debug__: + if self.nargs < 0: + raise TypeError('Options cannot have nargs < 0') + if self.prompt and self.is_flag and not self.is_bool_flag: + raise TypeError('Cannot prompt for flags that are not bools.') + if not self.is_bool_flag and self.secondary_opts: + raise TypeError('Got secondary option for non boolean flag.') + if self.is_bool_flag and self.hide_input \ + and self.prompt is not None: + raise TypeError('Hidden input does not work with boolean ' + 'flag prompts.') + if self.count: + if self.multiple: + raise TypeError('Options cannot be multiple and count ' + 'at the same time.') + elif self.is_flag: + raise TypeError('Options cannot be count and flags at ' + 'the same time.') + + def _parse_decls(self, decls, expose_value): + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if isidentifier(decl): + if name is not None: + raise TypeError('Name defined twice') + name = decl + else: + split_char = decl[:1] == '/' and ';' or '/' + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + else: + possible_names.append(split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace('-', '_').lower() + if not isidentifier(name): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError('Could not determine name for option') + + if not opts and not secondary_opts: + raise TypeError('No options defined but a name was passed (%s). ' + 'Did you mean to declare an argument instead ' + 'of an option?' 
% name) + + return name, opts, secondary_opts + + def add_to_parser(self, parser, ctx): + kwargs = { + 'dest': self.name, + 'nargs': self.nargs, + 'obj': self, + } + + if self.multiple: + action = 'append' + elif self.count: + action = 'count' + else: + action = 'store' + + if self.is_flag: + kwargs.pop('nargs', None) + if self.is_bool_flag and self.secondary_opts: + parser.add_option(self.opts, action=action + '_const', + const=True, **kwargs) + parser.add_option(self.secondary_opts, action=action + + '_const', const=False, **kwargs) + else: + parser.add_option(self.opts, action=action + '_const', + const=self.flag_value, + **kwargs) + else: + kwargs['action'] = action + parser.add_option(self.opts, **kwargs) + + def get_help_record(self, ctx): + if self.hidden: + return + any_prefix_is_slash = [] + + def _write_opts(opts): + rv, any_slashes = join_options(opts) + if any_slashes: + any_prefix_is_slash[:] = [True] + if not self.is_flag and not self.count: + rv += ' ' + self.make_metavar() + return rv + + rv = [_write_opts(self.opts)] + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or '' + extra = [] + if self.show_envvar: + envvar = self.envvar + if envvar is None: + if self.allow_from_autoenv and \ + ctx.auto_envvar_prefix is not None: + envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper()) + if envvar is not None: + extra.append('env var: %s' % ( + ', '.join('%s' % d for d in envvar) + if isinstance(envvar, (list, tuple)) + else envvar, )) + if self.default is not None and self.show_default: + if isinstance(self.show_default, string_types): + default_string = '({})'.format(self.show_default) + elif isinstance(self.default, (list, tuple)): + default_string = ', '.join('%s' % d for d in self.default) + elif inspect.isfunction(self.default): + default_string = "(dynamic)" + else: + default_string = self.default + extra.append('default: {}'.format(default_string)) + + if self.required: + extra.append('required') + if extra: + help = '%s[%s]' % (help and help + ' ' or '', '; '.join(extra)) + + return ((any_prefix_is_slash and '; ' or ' / ').join(rv), help) + + def get_default(self, ctx): + # If we're a non boolean flag out default is more complex because + # we need to look at all flags in the same group to figure out + # if we're the the default one in which case we return the flag + # value as default. + if self.is_flag and not self.is_bool_flag: + for param in ctx.command.params: + if param.name == self.name and param.default: + return param.flag_value + return None + return Parameter.get_default(self, ctx) + + def prompt_for_value(self, ctx): + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + # Calculate the default before prompting anything to be stable. + default = self.get_default(ctx) + + # If this is a prompt for a flag we need to handle this + # differently. 
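Taken together, the Option machinery above covers prompting, boolean flag pairs, counting, repetition, environment-variable fallback and default display; a sketch combining them (option and environment variable names are invented):

    import click

    @click.command()
    @click.option('--api-key', prompt=True, hide_input=True, envvar='EXPLORER_API_KEY',
                  help='Taken from the environment, otherwise prompted for securely.')
    @click.option('-v', '--verbose', count=True, help='Repeat for more detail (-vvv).')
    @click.option('--tag', multiple=True, help='May be passed several times.')
    @click.option('--dry-run/--no-dry-run', default=False, show_default=True)
    def push(api_key, verbose, tag, dry_run):
        click.echo('verbose=%d tags=%r dry_run=%r' % (verbose, tag, dry_run))

    if __name__ == '__main__':
        push()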
+ if self.is_bool_flag: + return confirm(self.prompt, default) + + return prompt(self.prompt, default=default, type=self.type, + hide_input=self.hide_input, show_choices=self.show_choices, + confirmation_prompt=self.confirmation_prompt, + value_proc=lambda x: self.process_value(ctx, x)) + + def resolve_envvar_value(self, ctx): + rv = Parameter.resolve_envvar_value(self, ctx) + if rv is not None: + return rv + if self.allow_from_autoenv and \ + ctx.auto_envvar_prefix is not None: + envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper()) + return os.environ.get(envvar) + + def value_from_envvar(self, ctx): + rv = self.resolve_envvar_value(ctx) + if rv is None: + return None + value_depth = (self.nargs != 1) + bool(self.multiple) + if value_depth > 0 and rv is not None: + rv = self.type.split_envvar_value(rv) + if self.multiple and self.nargs != 1: + rv = batch(rv, self.nargs) + return rv + + def full_process_value(self, ctx, value): + if value is None and self.prompt is not None \ + and not ctx.resilient_parsing: + return self.prompt_for_value(ctx) + return Parameter.full_process_value(self, ctx, value) + + +class Argument(Parameter): + """Arguments are positional parameters to a command. They generally + provide fewer features than options but can have infinite ``nargs`` + and are required by default. + + All parameters are passed onwards to the parameter constructor. + """ + param_type_name = 'argument' + + def __init__(self, param_decls, required=None, **attrs): + if required is None: + if attrs.get('default') is not None: + required = False + else: + required = attrs.get('nargs', 1) > 0 + Parameter.__init__(self, param_decls, required=required, **attrs) + if self.default is not None and self.nargs < 0: + raise TypeError('nargs=-1 in combination with a default value ' + 'is not supported.') + + @property + def human_readable_name(self): + if self.metavar is not None: + return self.metavar + return self.name.upper() + + def make_metavar(self): + if self.metavar is not None: + return self.metavar + var = self.type.get_metavar(self) + if not var: + var = self.name.upper() + if not self.required: + var = '[%s]' % var + if self.nargs != 1: + var += '...' + return var + + def _parse_decls(self, decls, expose_value): + if not decls: + if not expose_value: + return None, [], [] + raise TypeError('Could not determine name for argument') + if len(decls) == 1: + name = arg = decls[0] + name = name.replace('-', '_').lower() + else: + raise TypeError('Arguments take exactly one ' + 'parameter declaration, got %d' % len(decls)) + return name, [arg], [] + + def get_usage_pieces(self, ctx): + return [self.make_metavar()] + + def get_error_hint(self, ctx): + return '"%s"' % self.make_metavar() + + def add_to_parser(self, parser, ctx): + parser.add_argument(dest=self.name, nargs=self.nargs, + obj=self) + + +# Circular dependency between decorators and core +from .decorators import command, group diff --git a/py3/lib/python3.6/site-packages/click/decorators.py b/py3/lib/python3.6/site-packages/click/decorators.py new file mode 100644 index 0000000..c57c530 --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/decorators.py @@ -0,0 +1,311 @@ +import sys +import inspect + +from functools import update_wrapper + +from ._compat import iteritems +from ._unicodefun import _check_for_unicode_literals +from .utils import echo +from .globals import get_current_context + + +def pass_context(f): + """Marks a callback as wanting to receive the current context + object as first argument. 
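pass_context above, and pass_obj just below it, are the usual way to share state between a group and its subcommands; a minimal sketch with an invented --db option:

    import click

    @click.group()
    @click.option('--db', default='tokens.db')
    @click.pass_context
    def cli(ctx, db):
        # ctx.obj is inherited by every subcommand invoked in this run.
        ctx.obj = {'db': db}

    @cli.command()
    @click.pass_obj
    def stats(obj):
        click.echo('reading stats from %s' % obj['db'])

    if __name__ == '__main__':
        cli()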
+ """ + def new_func(*args, **kwargs): + return f(get_current_context(), *args, **kwargs) + return update_wrapper(new_func, f) + + +def pass_obj(f): + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. + """ + def new_func(*args, **kwargs): + return f(get_current_context().obj, *args, **kwargs) + return update_wrapper(new_func, f) + + +def make_pass_decorator(object_type, ensure=False): + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. + + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. + """ + def decorator(f): + def new_func(*args, **kwargs): + ctx = get_current_context() + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + if obj is None: + raise RuntimeError('Managed to invoke callback without a ' + 'context object of type %r existing' + % object_type.__name__) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + +def _make_command(f, name, attrs, cls): + if isinstance(f, Command): + raise TypeError('Attempted to convert a callback into a ' + 'command twice.') + try: + params = f.__click_params__ + params.reverse() + del f.__click_params__ + except AttributeError: + params = [] + help = attrs.get('help') + if help is None: + help = inspect.getdoc(f) + if isinstance(help, bytes): + help = help.decode('utf-8') + else: + help = inspect.cleandoc(help) + attrs['help'] = help + _check_for_unicode_literals() + return cls(name=name or f.__name__.lower().replace('_', '-'), + callback=f, params=params, **attrs) + + +def command(name=None, cls=None, **attrs): + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. + + The name of the command defaults to the name of the function. If you + want to change that, you can pass the intended name as the first + argument. + + All keyword arguments are forwarded to the underlying command class. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: the name of the command. This defaults to the function + name with underscores replaced by dashes. + :param cls: the command class to instantiate. This defaults to + :class:`Command`. + """ + if cls is None: + cls = Command + def decorator(f): + cmd = _make_command(f, name, attrs, cls) + cmd.__doc__ = f.__doc__ + return cmd + return decorator + + +def group(name=None, **attrs): + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. 
+ """ + attrs.setdefault('cls', Group) + return command(name, **attrs) + + +def _param_memo(f, param): + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, '__click_params__'): + f.__click_params__ = [] + f.__click_params__.append(param) + + +def argument(*param_decls, **attrs): + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + :param cls: the argument class to instantiate. This defaults to + :class:`Argument`. + """ + def decorator(f): + ArgumentClass = attrs.pop('cls', Argument) + _param_memo(f, ArgumentClass(param_decls, **attrs)) + return f + return decorator + + +def option(*param_decls, **attrs): + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. + + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + """ + def decorator(f): + # Issue 926, copy attrs, so pre-defined options can re-use the same cls= + option_attrs = attrs.copy() + + if 'help' in option_attrs: + option_attrs['help'] = inspect.cleandoc(option_attrs['help']) + OptionClass = option_attrs.pop('cls', Option) + _param_memo(f, OptionClass(param_decls, **option_attrs)) + return f + return decorator + + +def confirmation_option(*param_decls, **attrs): + """Shortcut for confirmation prompts that can be ignored by passing + ``--yes`` as parameter. + + This is equivalent to decorating a function with :func:`option` with + the following parameters:: + + def callback(ctx, param, value): + if not value: + ctx.abort() + + @click.command() + @click.option('--yes', is_flag=True, callback=callback, + expose_value=False, prompt='Do you want to continue?') + def dropdb(): + pass + """ + def decorator(f): + def callback(ctx, param, value): + if not value: + ctx.abort() + attrs.setdefault('is_flag', True) + attrs.setdefault('callback', callback) + attrs.setdefault('expose_value', False) + attrs.setdefault('prompt', 'Do you want to continue?') + attrs.setdefault('help', 'Confirm the action without prompting.') + return option(*(param_decls or ('--yes',)), **attrs)(f) + return decorator + + +def password_option(*param_decls, **attrs): + """Shortcut for password prompts. + + This is equivalent to decorating a function with :func:`option` with + the following parameters:: + + @click.command() + @click.option('--password', prompt=True, confirmation_prompt=True, + hide_input=True) + def changeadmin(password): + pass + """ + def decorator(f): + attrs.setdefault('prompt', True) + attrs.setdefault('confirmation_prompt', True) + attrs.setdefault('hide_input', True) + return option(*(param_decls or ('--password',)), **attrs)(f) + return decorator + + +def version_option(version=None, *param_decls, **attrs): + """Adds a ``--version`` option which immediately ends the program + printing out the version number. This is implemented as an eager + option that prints the version and exits the program in the callback. + + :param version: the version number to show. If not provided Click + attempts an auto discovery via setuptools. 
+ :param prog_name: the name of the program (defaults to autodetection) + :param message: custom message to show instead of the default + (``'%(prog)s, version %(version)s'``) + :param others: everything else is forwarded to :func:`option`. + """ + if version is None: + if hasattr(sys, '_getframe'): + module = sys._getframe(1).f_globals.get('__name__') + else: + module = '' + + def decorator(f): + prog_name = attrs.pop('prog_name', None) + message = attrs.pop('message', '%(prog)s, version %(version)s') + + def callback(ctx, param, value): + if not value or ctx.resilient_parsing: + return + prog = prog_name + if prog is None: + prog = ctx.find_root().info_name + ver = version + if ver is None: + try: + import pkg_resources + except ImportError: + pass + else: + for dist in pkg_resources.working_set: + scripts = dist.get_entry_map().get('console_scripts') or {} + for script_name, entry_point in iteritems(scripts): + if entry_point.module_name == module: + ver = dist.version + break + if ver is None: + raise RuntimeError('Could not determine version') + echo(message % { + 'prog': prog, + 'version': ver, + }, color=ctx.color) + ctx.exit() + + attrs.setdefault('is_flag', True) + attrs.setdefault('expose_value', False) + attrs.setdefault('is_eager', True) + attrs.setdefault('help', 'Show the version and exit.') + attrs['callback'] = callback + return option(*(param_decls or ('--version',)), **attrs)(f) + return decorator + + +def help_option(*param_decls, **attrs): + """Adds a ``--help`` option which immediately ends the program + printing out the help page. This is usually unnecessary to add as + this is added by default to all commands unless suppressed. + + Like :func:`version_option`, this is implemented as eager option that + prints in the callback and exits. + + All arguments are forwarded to :func:`option`. 
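A small sketch of how the shortcut decorators defined above (:func:`confirmation_option`, :func:`password_option`, :func:`version_option`) come together on one command; the ``dropdb`` command, program name and version string are invented for the example::

    import click

    @click.command()
    @click.version_option(version='1.2.3', prog_name='dbadmin')
    @click.confirmation_option(prompt='Drop all tables?')
    @click.password_option()
    def dropdb(password):
        click.echo('Dropping database (password length: %d)' % len(password))

    if __name__ == '__main__':
        dropdb()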
+ """ + def decorator(f): + def callback(ctx, param, value): + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + attrs.setdefault('is_flag', True) + attrs.setdefault('expose_value', False) + attrs.setdefault('help', 'Show this message and exit.') + attrs.setdefault('is_eager', True) + attrs['callback'] = callback + return option(*(param_decls or ('--help',)), **attrs)(f) + return decorator + + +# Circular dependencies between core and decorators +from .core import Command, Group, Argument, Option diff --git a/py3/lib/python3.6/site-packages/click/exceptions.py b/py3/lib/python3.6/site-packages/click/exceptions.py new file mode 100644 index 0000000..6fa1765 --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/exceptions.py @@ -0,0 +1,235 @@ +from ._compat import PY2, filename_to_ui, get_text_stderr +from .utils import echo + + +def _join_param_hints(param_hint): + if isinstance(param_hint, (tuple, list)): + return ' / '.join('"%s"' % x for x in param_hint) + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception + exit_code = 1 + + def __init__(self, message): + ctor_msg = message + if PY2: + if ctor_msg is not None: + ctor_msg = ctor_msg.encode('utf-8') + Exception.__init__(self, ctor_msg) + self.message = message + + def format_message(self): + return self.message + + def __str__(self): + return self.message + + if PY2: + __unicode__ = __str__ + + def __str__(self): + return self.message.encode('utf-8') + + def show(self, file=None): + if file is None: + file = get_text_stderr() + echo('Error: %s' % self.format_message(), file=file) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. Click will + fill in the context automatically in some situations. + """ + exit_code = 2 + + def __init__(self, message, ctx=None): + ClickException.__init__(self, message) + self.ctx = ctx + self.cmd = self.ctx and self.ctx.command or None + + def show(self, file=None): + if file is None: + file = get_text_stderr() + color = None + hint = '' + if (self.cmd is not None and + self.cmd.get_help_option(self.ctx) is not None): + hint = ('Try "%s %s" for help.\n' + % (self.ctx.command_path, self.ctx.help_option_names[0])) + if self.ctx is not None: + color = self.ctx.color + echo(self.ctx.get_usage() + '\n%s' % hint, file=file, color=color) + echo('Error: %s' % self.format_message(), file=file, color=color) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. + :param param_hint: a string that shows up as parameter name. This + can be used as alternative to `param` in cases + where custom validation should happen. If it is + a string it's used as such, if it's a list then + each item is quoted and separated. 
+ """ + + def __init__(self, message, ctx=None, param=None, + param_hint=None): + UsageError.__init__(self, message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self): + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) + else: + return 'Invalid value: %s' % self.message + param_hint = _join_param_hints(param_hint) + + return 'Invalid value for %s: %s' % (param_hint, self.message) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__(self, message=None, ctx=None, param=None, + param_hint=None, param_type=None): + BadParameter.__init__(self, message, ctx, param, param_hint) + self.param_type = param_type + + def format_message(self): + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) + else: + param_hint = None + param_hint = _join_param_hints(param_hint) + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message(self.param) + if msg_extra: + if msg: + msg += '. ' + msg_extra + else: + msg = msg_extra + + return 'Missing %s%s%s%s' % ( + param_type, + param_hint and ' %s' % param_hint or '', + msg and '. ' or '.', + msg or '', + ) + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. versionadded:: 4.0 + """ + + def __init__(self, option_name, message=None, possibilities=None, + ctx=None): + if message is None: + message = 'no such option: %s' % option_name + UsageError.__init__(self, message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self): + bits = [self.message] + if self.possibilities: + if len(self.possibilities) == 1: + bits.append('Did you mean %s?' % self.possibilities[0]) + else: + possibilities = sorted(self.possibilities) + bits.append('(Possible options: %s)' % ', '.join(possibilities)) + return ' '.join(bits) + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__(self, option_name, message, ctx=None): + UsageError.__init__(self, message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. 
versionadded:: 6.0 + """ + + def __init__(self, message, ctx=None): + UsageError.__init__(self, message, ctx) + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename, hint=None): + ui_filename = filename_to_ui(filename) + if hint is None: + hint = 'unknown error' + ClickException.__init__(self, hint) + self.ui_filename = ui_filename + self.filename = filename + + def format_message(self): + return 'Could not open file %s: %s' % (self.ui_filename, self.message) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. + """ + def __init__(self, code=0): + self.exit_code = code diff --git a/py3/lib/python3.6/site-packages/click/formatting.py b/py3/lib/python3.6/site-packages/click/formatting.py new file mode 100644 index 0000000..a3d6a4d --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/formatting.py @@ -0,0 +1,256 @@ +from contextlib import contextmanager +from .termui import get_terminal_size +from .parser import split_opt +from ._compat import term_len + + +# Can force a width. This is used by the test system +FORCED_WIDTH = None + + +def measure_table(rows): + widths = {} + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows(rows, col_count): + for row in rows: + row = tuple(row) + yield row + ('',) * (col_count - len(row)) + + +def wrap_text(text, width=78, initial_indent='', subsequent_indent='', + preserve_paragraphs=False): + """A helper function that intelligently wraps text. By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. 
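The exception classes above are most often raised from parameter callbacks, where Click attaches the offending parameter and a usage hint automatically; a minimal sketch with an invented ``--port`` option::

    import click

    def validate_port(ctx, param, value):
        # Rendered by Click as 'Invalid value for "--port": ...'
        if not 1 <= value <= 65535:
            raise click.BadParameter('port must be between 1 and 65535')
        return value

    @click.command()
    @click.option('--port', type=int, default=8080, callback=validate_port)
    def serve(port):
        click.echo('serving on port %d' % port)

    if __name__ == '__main__':
        serve()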
+ """ + from ._textwrap import TextWrapper + text = text.expandtabs() + wrapper = TextWrapper(width, initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False) + if not preserve_paragraphs: + return wrapper.fill(text) + + p = [] + buf = [] + indent = None + + def _flush_par(): + if not buf: + return + if buf[0].strip() == '\b': + p.append((indent or 0, True, '\n'.join(buf[1:]))) + else: + p.append((indent or 0, False, ' '.join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(' ' * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return '\n\n'.join(rv) + + +class HelpFormatter(object): + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. This defaults to the terminal + width clamped to a maximum of 78. + """ + + def __init__(self, indent_increment=2, width=None, max_width=None): + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + width = FORCED_WIDTH + if width is None: + width = max(min(get_terminal_size()[0], max_width) - 2, 50) + self.width = width + self.current_indent = 0 + self.buffer = [] + + def write(self, string): + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self): + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self): + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage(self, prog, args='', prefix='Usage: '): + """Writes a usage line into the buffer. + + :param prog: the program name. + :param args: whitespace separated list of arguments. + :param prefix: the prefix for the first line. + """ + usage_prefix = '%*s%s ' % (self.current_indent, prefix, prog) + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = ' ' * term_len(usage_prefix) + self.write(wrap_text(args, text_width, + initial_indent=usage_prefix, + subsequent_indent=indent)) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write('\n') + indent = ' ' * (max(self.current_indent, term_len(prefix)) + 4) + self.write(wrap_text(args, text_width, + initial_indent=indent, + subsequent_indent=indent)) + + self.write('\n') + + def write_heading(self, heading): + """Writes a heading into the buffer.""" + self.write('%*s%s:\n' % (self.current_indent, '', heading)) + + def write_paragraph(self): + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write('\n') + + def write_text(self, text): + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. 
+ """ + text_width = max(self.width - self.current_indent, 11) + indent = ' ' * self.current_indent + self.write(wrap_text(text, text_width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True)) + self.write('\n') + + def write_dl(self, rows, col_max=30, col_spacing=2): + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. + """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError('Expected two columns for definition list') + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write('%*s%s' % (self.current_indent, '', first)) + if not second: + self.write('\n') + continue + if term_len(first) <= first_col - col_spacing: + self.write(' ' * (first_col - term_len(first))) + else: + self.write('\n') + self.write(' ' * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + lines = iter(wrap_text(second, text_width).splitlines()) + if lines: + self.write(next(lines) + '\n') + for line in lines: + self.write('%*s%s\n' % ( + first_col + self.current_indent, '', line)) + else: + self.write('\n') + + @contextmanager + def section(self, name): + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. + """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self): + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self): + """Returns the buffer contents.""" + return ''.join(self.buffer) + + +def join_options(options): + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + for opt in options: + prefix = split_opt(opt)[0] + if prefix == '/': + any_prefix_is_slash = True + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + + rv = ', '.join(x[1] for x in rv) + return rv, any_prefix_is_slash diff --git a/py3/lib/python3.6/site-packages/click/globals.py b/py3/lib/python3.6/site-packages/click/globals.py new file mode 100644 index 0000000..843b594 --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/globals.py @@ -0,0 +1,48 @@ +from threading import local + + +_local = local() + + +def get_current_context(silent=False): + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: is set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. 
+ """ + try: + return getattr(_local, 'stack')[-1] + except (AttributeError, IndexError): + if not silent: + raise RuntimeError('There is no active click context.') + + +def push_context(ctx): + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault('stack', []).append(ctx) + + +def pop_context(): + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color=None): + """"Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. + """ + if color is not None: + return color + ctx = get_current_context(silent=True) + if ctx is not None: + return ctx.color diff --git a/py3/lib/python3.6/site-packages/click/parser.py b/py3/lib/python3.6/site-packages/click/parser.py new file mode 100644 index 0000000..1c3ae9c --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/parser.py @@ -0,0 +1,427 @@ +# -*- coding: utf-8 -*- +""" +click.parser +~~~~~~~~~~~~ + +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. +""" + +import re +from collections import deque +from .exceptions import UsageError, NoSuchOption, BadOptionUsage, \ + BadArgumentUsage + + +def _unpack_args(args, nargs_spec): + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with `None`. + """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv = [] + spos = None + + def _fetch(c): + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return None + + while nargs_spec: + nargs = _fetch(nargs_spec) + if nargs == 1: + rv.append(_fetch(args)) + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. + if spos is not None: + x.reverse() + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError('Cannot have two nargs < 0') + spos = len(rv) + rv.append(None) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. 
+ if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1:] = reversed(rv[spos + 1:]) + + return tuple(rv), list(args) + + +def _error_opt_args(nargs, opt): + if nargs == 1: + raise BadOptionUsage(opt, '%s option requires an argument' % opt) + raise BadOptionUsage(opt, '%s option requires %d arguments' % (opt, nargs)) + + +def split_opt(opt): + first = opt[:1] + if first.isalnum(): + return '', opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def normalize_opt(opt, ctx): + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = split_opt(opt) + return prefix + ctx.token_normalize_func(opt) + + +def split_arg_string(string): + """Given an argument string this attempts to split it into small parts.""" + rv = [] + for match in re.finditer(r"('([^'\\]*(?:\\.[^'\\]*)*)'" + r'|"([^"\\]*(?:\\.[^"\\]*)*)"' + r'|\S+)\s*', string, re.S): + arg = match.group().strip() + if arg[:1] == arg[-1:] and arg[:1] in '"\'': + arg = arg[1:-1].encode('ascii', 'backslashreplace') \ + .decode('unicode-escape') + try: + arg = type(string)(arg) + except UnicodeError: + pass + rv.append(arg) + return rv + + +class Option(object): + + def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None): + self._short_opts = [] + self._long_opts = [] + self.prefixes = set() + + for opt in opts: + prefix, value = split_opt(opt) + if not prefix: + raise ValueError('Invalid start character for option (%s)' + % opt) + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = 'store' + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self): + return self.action in ('store', 'append') + + def process(self, value, state): + if self.action == 'store': + state.opts[self.dest] = value + elif self.action == 'store_const': + state.opts[self.dest] = self.const + elif self.action == 'append': + state.opts.setdefault(self.dest, []).append(value) + elif self.action == 'append_const': + state.opts.setdefault(self.dest, []).append(self.const) + elif self.action == 'count': + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 + else: + raise ValueError('unknown action %r' % self.action) + state.order.append(self.obj) + + +class Argument(object): + + def __init__(self, dest, nargs=1, obj=None): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process(self, value, state): + if self.nargs > 1: + holes = sum(1 for x in value if x is None) + if holes == len(value): + value = None + elif holes != 0: + raise BadArgumentUsage('argument %s takes %d values' + % (self.dest, self.nargs)) + state.opts[self.dest] = value + state.order.append(self.obj) + + +class ParsingState(object): + + def __init__(self, rargs): + self.opts = {} + self.largs = [] + self.rargs = rargs + self.order = [] + + +class OptionParser(object): + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). 
+ + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + """ + + def __init__(self, ctx=None): + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. + self.allow_interspersed_args = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options = False + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + self._short_opt = {} + self._long_opt = {} + self._opt_prefixes = set(['-', '--']) + self._args = [] + + def add_option(self, opts, dest, action=None, nargs=1, const=None, + obj=None): + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``appnd_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + if obj is None: + obj = dest + opts = [normalize_opt(opt, self.ctx) for opt in opts] + option = Option(opts, dest, action=action, nargs=nargs, + const=const, obj=obj) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument(self, dest, nargs=1, obj=None): + """Adds a positional argument named `dest` to the parser. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + if obj is None: + obj = dest + self._args.append(Argument(dest=dest, nargs=nargs, obj=obj)) + + def parse_args(self, args): + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state): + pargs, args = _unpack_args(state.largs + state.rargs, + [x.nargs for x in self._args]) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state): + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. 
+ if arg == '--': + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). + # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt(self, opt, explicit_value, state): + if opt not in self._long_opt: + possibilities = [word for word in self._long_opt + if word.startswith(opt)] + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. + if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + nargs = option.nargs + if len(state.rargs) < nargs: + _error_opt_args(nargs, opt) + elif nargs == 1: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + elif explicit_value is not None: + raise BadOptionUsage(opt, '%s option does not take a value' % opt) + + else: + value = None + + option.process(value, state) + + def _match_short_opt(self, arg, state): + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = normalize_opt(prefix + ch, self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. + if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + nargs = option.nargs + if len(state.rargs) < nargs: + _error_opt_args(nargs, opt) + elif nargs == 1: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + else: + value = None + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we re-combinate the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append(prefix + ''.join(unknown_options)) + + def _process_opts(self, arg, state): + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. 
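``OptionParser`` is internal and normally wrapped by the higher-level ``Command`` classes, but exercising it directly shows how the option and argument matching above fits together; the flags and filenames below are invented for the example::

    from click.parser import OptionParser

    parser = OptionParser()
    parser.add_option(['-v', '--verbose'], dest='verbose', action='count')
    parser.add_option(['-o', '--output'], dest='output')
    parser.add_argument(dest='filenames', nargs=-1)

    opts, leftovers, order = parser.parse_args(['-vv', '-o', 'out.txt', 'a.txt', 'b.txt'])
    # opts roughly -> {'verbose': 2, 'output': 'out.txt', 'filenames': ('a.txt', 'b.txt')}
    print(opts, leftovers)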
+ if '=' in arg: + long_opt, explicit_value = arg.split('=', 1) + else: + long_opt = arg + norm_long_opt = normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. + if arg[:2] not in self._opt_prefixes: + return self._match_short_opt(arg, state) + if not self.ignore_unknown_options: + raise + state.largs.append(arg) diff --git a/py3/lib/python3.6/site-packages/click/termui.py b/py3/lib/python3.6/site-packages/click/termui.py new file mode 100644 index 0000000..bf9a3aa --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/termui.py @@ -0,0 +1,606 @@ +import os +import sys +import struct +import inspect +import itertools + +from ._compat import raw_input, text_type, string_types, \ + isatty, strip_ansi, get_winterm_size, DEFAULT_COLUMNS, WIN +from .utils import echo +from .exceptions import Abort, UsageError +from .types import convert_type, Choice, Path +from .globals import resolve_color_default + + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. +visible_prompt_func = raw_input + +_ansi_colors = { + 'black': 30, + 'red': 31, + 'green': 32, + 'yellow': 33, + 'blue': 34, + 'magenta': 35, + 'cyan': 36, + 'white': 37, + 'reset': 39, + 'bright_black': 90, + 'bright_red': 91, + 'bright_green': 92, + 'bright_yellow': 93, + 'bright_blue': 94, + 'bright_magenta': 95, + 'bright_cyan': 96, + 'bright_white': 97, +} +_ansi_reset_all = '\033[0m' + + +def hidden_prompt_func(prompt): + import getpass + return getpass.getpass(prompt) + + +def _build_prompt(text, suffix, show_default=False, default=None, show_choices=True, type=None): + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += ' (' + ", ".join(map(str, type.choices)) + ')' + if default is not None and show_default: + prompt = '%s [%s]' % (prompt, default) + return prompt + suffix + + +def prompt(text, default=None, hide_input=False, confirmation_prompt=False, + type=None, value_proc=None, prompt_suffix=': ', show_default=True, + err=False, show_choices=True): + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. + + If the user aborts the input by sending a interrupt signal, this + function will catch it and raise a :exc:`Abort` exception. + + .. versionadded:: 7.0 + Added the show_choices parameter. + + .. versionadded:: 6.0 + Added unicode support for cmd.exe on Windows. + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param text: the text to show for the prompt. + :param default: the default value to use if no input happens. If this + is not given it will prompt until it's aborted. + :param hide_input: if this is set to true then the input value will + be hidden. + :param confirmation_prompt: asks for confirmation for the value. + :param type: the type to use to check the value against. 
+ :param value_proc: if this parameter is provided it's a function that + is invoked instead of the type conversion to + convert a value. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + :param show_choices: Show or hide choices if the passed type is a Choice. + For example if type is a Choice of either day or week, + show_choices is true and text is "Group by" then the + prompt will be "Group by (day, week): ". + """ + result = None + + def prompt_func(text): + f = hide_input and hidden_prompt_func or visible_prompt_func + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(text, nl=False, err=err) + return f('') + except (KeyboardInterrupt, EOFError): + # getpass doesn't print a newline if the user aborts input with ^C. + # Allegedly this behavior is inherited from getpass(3). + # A doc bug has been filed at https://bugs.python.org/issue24711 + if hide_input: + echo(None, err=err) + raise Abort() + + if value_proc is None: + value_proc = convert_type(type, default) + + prompt = _build_prompt(text, prompt_suffix, show_default, default, show_choices, type) + + while 1: + while 1: + value = prompt_func(prompt) + if value: + break + elif default is not None: + if isinstance(value_proc, Path): + # validate Path default value(exists, dir_okay etc.) + value = default + break + return default + try: + result = value_proc(value) + except UsageError as e: + echo('Error: %s' % e.message, err=err) + continue + if not confirmation_prompt: + return result + while 1: + value2 = prompt_func('Repeat for confirmation: ') + if value2: + break + if value == value2: + return result + echo('Error: the two entered values do not match', err=err) + + +def confirm(text, default=False, abort=False, prompt_suffix=': ', + show_default=True, err=False): + """Prompts for confirmation (yes/no question). + + If the user aborts the input by sending a interrupt signal this + function will catch it and raise a :exc:`Abort` exception. + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param text: the question to ask. + :param default: the default for the prompt. + :param abort: if this is set to `True` a negative answer aborts the + exception by raising :exc:`Abort`. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + prompt = _build_prompt(text, prompt_suffix, show_default, + default and 'Y/n' or 'y/N') + while 1: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt, nl=False, err=err) + value = visible_prompt_func('').lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() + if value in ('y', 'yes'): + rv = True + elif value in ('n', 'no'): + rv = False + elif value == '': + rv = default + else: + echo('Error: invalid input', err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def get_terminal_size(): + """Returns the current size of the terminal as tuple in the form + ``(width, height)`` in columns and rows. 
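The prompting helpers above are usually all a command needs for simple interactive flows; a brief sketch (the environment names are illustrative)::

    import click

    @click.command()
    def deploy():
        target = click.prompt('Target environment', default='staging',
                              type=click.Choice(['staging', 'production']))
        if target == 'production' and not click.confirm('Deploy to production?',
                                                        default=False):
            click.echo('Aborted.')
            return
        click.echo('Deploying to %s' % target)

    if __name__ == '__main__':
        deploy()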
+ """ + # If shutil has get_terminal_size() (Python 3.3 and later) use that + if sys.version_info >= (3, 3): + import shutil + shutil_get_terminal_size = getattr(shutil, 'get_terminal_size', None) + if shutil_get_terminal_size: + sz = shutil_get_terminal_size() + return sz.columns, sz.lines + + # We provide a sensible default for get_winterm_size() when being invoked + # inside a subprocess. Without this, it would not provide a useful input. + if get_winterm_size is not None: + size = get_winterm_size() + if size == (0, 0): + return (79, 24) + else: + return size + + def ioctl_gwinsz(fd): + try: + import fcntl + import termios + cr = struct.unpack( + 'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')) + except Exception: + return + return cr + + cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2) + if not cr: + try: + fd = os.open(os.ctermid(), os.O_RDONLY) + try: + cr = ioctl_gwinsz(fd) + finally: + os.close(fd) + except Exception: + pass + if not cr or not cr[0] or not cr[1]: + cr = (os.environ.get('LINES', 25), + os.environ.get('COLUMNS', DEFAULT_COLUMNS)) + return int(cr[1]), int(cr[0]) + + +def echo_via_pager(text_or_generator, color=None): + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. + :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. + """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = text_or_generator() + elif isinstance(text_or_generator, string_types): + i = [text_or_generator] + else: + i = iter(text_or_generator) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, string_types) else text_type(el) + for el in i) + + from ._termui_impl import pager + return pager(itertools.chain(text_generator, "\n"), color) + + +def progressbar(iterable=None, length=None, label=None, show_eta=True, + show_percent=None, show_pos=False, + item_show_func=None, fill_char='#', empty_char='-', + bar_template='%(label)s [%(bar)s] %(info)s', + info_sep=' ', width=36, file=None, color=None): + """This function creates an iterable context manager that can be used + to iterate over something while showing a progress bar. It will + either iterate over the `iterable` or `length` items (that are counted + up). While iteration happens, this function will print a rendered + progress bar to the given `file` (defaults to stdout) and will attempt + to calculate remaining time and more. By default, this progress bar + will not be rendered if the file is not a terminal. + + The context manager creates the progress bar. When the context + manager is entered the progress bar is already displayed. With every + iteration over the progress bar, the iterable passed to the bar is + advanced and the bar is updated. When the context manager exits, + a newline is printed and the progress bar is finalized on screen. + + No printing must happen or the progress bar will be unintentionally + destroyed. + + Example usage:: + + with progressbar(items) as bar: + for item in bar: + do_something_with(item) + + Alternatively, if no iterable is specified, one can manually update the + progress bar through the `update()` method instead of directly + iterating over the progress bar. 
The update method accepts the number + of steps to increment the bar with:: + + with progressbar(length=chunks.total_bytes) as bar: + for chunk in chunks: + process_chunk(chunk) + bar.update(chunks.bytes) + + .. versionadded:: 2.0 + + .. versionadded:: 4.0 + Added the `color` parameter. Added a `update` method to the + progressbar object. + + :param iterable: an iterable to iterate over. If not provided the length + is required. + :param length: the number of items to iterate over. By default the + progressbar will attempt to ask the iterator about its + length, which might or might not work. If an iterable is + also provided this parameter can be used to override the + length. If an iterable is not provided the progress bar + will iterate over a range of that length. + :param label: the label to show next to the progress bar. + :param show_eta: enables or disables the estimated time display. This is + automatically disabled if the length cannot be + determined. + :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: a function called with the current item which + can return a string to show the current item + next to the progress bar. Note that the current + item can be `None`! + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: the file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. + """ + from ._termui_impl import ProgressBar + color = resolve_color_default(color) + return ProgressBar(iterable=iterable, length=length, show_eta=show_eta, + show_percent=show_percent, show_pos=show_pos, + item_show_func=item_show_func, fill_char=fill_char, + empty_char=empty_char, bar_template=bar_template, + info_sep=info_sep, file=file, label=label, + width=width, color=color) + + +def clear(): + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + # If we're on Windows and we don't have colorama available, then we + # clear the screen by shelling out. Otherwise we can use an escape + # sequence. + if WIN: + os.system('cls') + else: + sys.stdout.write('\033[2J\033[1;1H') + + +def style(text, fg=None, bg=None, bold=None, dim=None, underline=None, + blink=None, reverse=None, reset=True): + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. 
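In practice the progress bar is almost always used as the iteration wrapper described above; a minimal sketch with a stand-in workload::

    import time
    import click

    @click.command()
    def build():
        with click.progressbar(range(200), label='Compiling', show_pos=True) as bar:
            for item in bar:
                time.sleep(0.01)   # stand-in for real per-item work

    if __name__ == '__main__':
        build()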
+ + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + .. versionadded:: 2.0 + + .. versionadded:: 7.0 + Added support for bright colors. + + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + """ + bits = [] + if fg: + try: + bits.append('\033[%dm' % (_ansi_colors[fg])) + except KeyError: + raise TypeError('Unknown color %r' % fg) + if bg: + try: + bits.append('\033[%dm' % (_ansi_colors[bg] + 10)) + except KeyError: + raise TypeError('Unknown color %r' % bg) + if bold is not None: + bits.append('\033[%dm' % (1 if bold else 22)) + if dim is not None: + bits.append('\033[%dm' % (2 if dim else 22)) + if underline is not None: + bits.append('\033[%dm' % (4 if underline else 24)) + if blink is not None: + bits.append('\033[%dm' % (5 if blink else 25)) + if reverse is not None: + bits.append('\033[%dm' % (7 if reverse else 27)) + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return ''.join(bits) + + +def unstyle(text): + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho(message=None, file=None, nl=True, err=False, color=None, **styles): + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + .. versionadded:: 2.0 + """ + if message is not None: + message = style(message, **styles) + return echo(message, file=file, nl=nl, err=err, color=color) + + +def edit(text=None, editor=None, env=None, require_save=True, + extension='.txt', filename=None): + r"""Edits the given text in the defined editor. If an editor is given + (should be the full path to the executable but the regular operating + system search path is used for finding the executable) it overrides + the detected editor. Optionally, some environment variables can be + used. If the editor is closed without changes, `None` is returned. 
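The styling helpers above compose naturally with :func:`echo`; for instance (the message text is invented)::

    import click

    click.echo(click.style('ok', fg='green') + ' build finished')
    click.secho('warning: 3 tests were skipped', fg='yellow', bold=True)
    click.secho('error: build failed', fg='bright_red', err=True)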
In + case a file is edited directly the return value is always `None` and + `require_save` and `extension` are ignored. + + If the editor cannot be opened a :exc:`UsageError` is raised. + + Note for Windows: to simplify cross-platform usage, the newlines are + automatically converted from POSIX to Windows and vice versa. As such, + the message here will have ``\n`` as newline markers. + + :param text: the text to edit. + :param editor: optionally the editor to use. Defaults to automatic + detection. + :param env: environment variables to forward to the editor. + :param require_save: if this is true, then not saving in the editor + will make the return value become `None`. + :param extension: the extension to tell the editor about. This defaults + to `.txt` but changing this might change syntax + highlighting. + :param filename: if provided it will edit this file instead of the + provided text contents. It will not use a temporary + file as an indirection in that case. + """ + from ._termui_impl import Editor + editor = Editor(editor=editor, env=env, require_save=require_save, + extension=extension) + if filename is None: + return editor.edit(text) + editor.edit_file(filename) + + +def launch(url, wait=False, locate=False): + """This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, ``0`` indicates + success. + + Examples:: + + click.launch('https://click.palletsprojects.com/') + click.launch('/my/downloaded/file', locate=True) + + .. versionadded:: 2.0 + + :param url: URL or filename of the thing to launch. + :param wait: waits for the program to stop. + :param locate: if this is set to `True` then instead of launching the + application associated with the URL it will attempt to + launch a file manager with the file located. This + might have weird effects if the URL does not point to + the filesystem. + """ + from ._termui_impl import open_url + return open_url(url, wait=wait, locate=locate) + + +# If this is provided, getchar() calls into this instead. This is used +# for unittesting purposes. +_getchar = None + + +def getchar(echo=False): + """Fetches a single character from the terminal and returns it. This + will always return a unicode character and under certain rare + circumstances this might return more than one character. The + situations which more than one character is returned is when for + whatever reason multiple characters end up in the terminal buffer or + standard input was not actually a terminal. + + Note that this will always read from the terminal, even if something + is piped into the standard input. + + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. + This is because certain Unicode characters look like special-key markers. + + .. versionadded:: 2.0 + + :param echo: if set to `True`, the character read will also show up on + the terminal. The default is to not show it. + """ + f = _getchar + if f is None: + from ._termui_impl import getchar as f + return f(echo) + + +def raw_terminal(): + from ._termui_impl import raw_terminal as f + return f() + + +def pause(info='Press any key to continue ...', err=False): + """This command stops execution and waits for the user to press any + key to continue. This is similar to the Windows batch "pause" + command. 
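A short sketch of the editor and pause helpers above inside a command; the template text is invented, and ``click.edit`` returning `None` means the user saved nothing::

    import click

    @click.command()
    def report():
        message = click.edit('# Describe the issue above this line\n')
        if message is None:
            click.echo('No report written.')
        else:
            click.echo('Report:\n%s' % message)
        click.pause()

    if __name__ == '__main__':
        report()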
If the program is not run through a terminal, this command + will instead do nothing. + + .. versionadded:: 2.0 + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param info: the info string to print before pausing. + :param err: if set to message goes to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + if not isatty(sys.stdin) or not isatty(sys.stdout): + return + try: + if info: + echo(info, nl=False, err=err) + try: + getchar() + except (KeyboardInterrupt, EOFError): + pass + finally: + if info: + echo(err=err) diff --git a/py3/lib/python3.6/site-packages/click/testing.py b/py3/lib/python3.6/site-packages/click/testing.py new file mode 100644 index 0000000..1b2924e --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/testing.py @@ -0,0 +1,374 @@ +import os +import sys +import shutil +import tempfile +import contextlib +import shlex + +from ._compat import iteritems, PY2, string_types + + +# If someone wants to vendor click, we want to ensure the +# correct package is discovered. Ideally we could use a +# relative import here but unfortunately Python does not +# support that. +clickpkg = sys.modules[__name__.rsplit('.', 1)[0]] + + +if PY2: + from cStringIO import StringIO +else: + import io + from ._compat import _find_binary_reader + + +class EchoingStdin(object): + + def __init__(self, input, output): + self._input = input + self._output = output + + def __getattr__(self, x): + return getattr(self._input, x) + + def _echo(self, rv): + self._output.write(rv) + return rv + + def read(self, n=-1): + return self._echo(self._input.read(n)) + + def readline(self, n=-1): + return self._echo(self._input.readline(n)) + + def readlines(self): + return [self._echo(x) for x in self._input.readlines()] + + def __iter__(self): + return iter(self._echo(x) for x in self._input) + + def __repr__(self): + return repr(self._input) + + +def make_input_stream(input, charset): + # Is already an input stream. + if hasattr(input, 'read'): + if PY2: + return input + rv = _find_binary_reader(input) + if rv is not None: + return rv + raise TypeError('Could not find binary reader for input stream.') + + if input is None: + input = b'' + elif not isinstance(input, bytes): + input = input.encode(charset) + if PY2: + return StringIO(input) + return io.BytesIO(input) + + +class Result(object): + """Holds the captured result of an invoked CLI script.""" + + def __init__(self, runner, stdout_bytes, stderr_bytes, exit_code, + exception, exc_info=None): + #: The runner that created the result + self.runner = runner + #: The standard output as bytes. + self.stdout_bytes = stdout_bytes + #: The standard error as bytes, or False(y) if not available + self.stderr_bytes = stderr_bytes + #: The exit code as integer. + self.exit_code = exit_code + #: The exception that happened if one did. 
+ self.exception = exception + #: The traceback + self.exc_info = exc_info + + @property + def output(self): + """The (standard) output as unicode string.""" + return self.stdout + + @property + def stdout(self): + """The standard output as unicode string.""" + return self.stdout_bytes.decode(self.runner.charset, 'replace') \ + .replace('\r\n', '\n') + + @property + def stderr(self): + """The standard error as unicode string.""" + if not self.stderr_bytes: + raise ValueError("stderr not separately captured") + return self.stderr_bytes.decode(self.runner.charset, 'replace') \ + .replace('\r\n', '\n') + + + def __repr__(self): + return '<%s %s>' % ( + type(self).__name__, + self.exception and repr(self.exception) or 'okay', + ) + + +class CliRunner(object): + """The CLI runner provides functionality to invoke a Click command line + script for unittesting purposes in a isolated environment. This only + works in single-threaded systems without any concurrency as it changes the + global interpreter state. + + :param charset: the character set for the input and output data. This is + UTF-8 by default and should not be changed currently as + the reporting to Click only works in Python 2 properly. + :param env: a dictionary with environment variables for overriding. + :param echo_stdin: if this is set to `True`, then reading from stdin writes + to stdout. This is useful for showing examples in + some circumstances. Note that regular prompts + will automatically echo the input. + :param mix_stderr: if this is set to `False`, then stdout and stderr are + preserved as independent streams. This is useful for + Unix-philosophy apps that have predictable stdout and + noisy stderr, such that each may be measured + independently + """ + + def __init__(self, charset=None, env=None, echo_stdin=False, + mix_stderr=True): + if charset is None: + charset = 'utf-8' + self.charset = charset + self.env = env or {} + self.echo_stdin = echo_stdin + self.mix_stderr = mix_stderr + + def get_default_prog_name(self, cli): + """Given a command object it will return the default program name + for it. The default is the `name` attribute or ``"root"`` if not + set. + """ + return cli.name or 'root' + + def make_env(self, overrides=None): + """Returns the environment overrides for invoking a script.""" + rv = dict(self.env) + if overrides: + rv.update(overrides) + return rv + + @contextlib.contextmanager + def isolation(self, input=None, env=None, color=False): + """A context manager that sets up the isolation for invoking of a + command line tool. This sets up stdin with the given input data + and `os.environ` with the overrides from the given dictionary. + This also rebinds some internals in Click to be mocked (like the + prompt functionality). + + This is automatically done in the :meth:`invoke` method. + + .. versionadded:: 4.0 + The ``color`` parameter was added. + + :param input: the input stream to put into sys.stdin. + :param env: the environment overrides as dictionary. + :param color: whether the output should contain color codes. The + application can still override this explicitly. 
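Editor's note: a sketch of using CliRunner with the isolation() context manager shown above. The environment variable name is hypothetical; with mix_stderr=False the second yielded buffer captures stderr separately.

import sys
from click.testing import CliRunner

runner = CliRunner(env={'MY_APP_DEBUG': '1'}, mix_stderr=False)

# isolation() rebinds stdin/stdout/stderr and applies the env overrides;
# it yields the captured byte buffers.
with runner.isolation(input='typed input\n') as (out, err):
    sys.stdout.write('hello from inside the sandbox\n')
    sys.stdout.flush()

print(out.getvalue())   # b'hello from inside the sandbox\n'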
+ """ + input = make_input_stream(input, self.charset) + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + old_forced_width = clickpkg.formatting.FORCED_WIDTH + clickpkg.formatting.FORCED_WIDTH = 80 + + env = self.make_env(env) + + if PY2: + bytes_output = StringIO() + if self.echo_stdin: + input = EchoingStdin(input, bytes_output) + sys.stdout = bytes_output + if not self.mix_stderr: + bytes_error = StringIO() + sys.stderr = bytes_error + else: + bytes_output = io.BytesIO() + if self.echo_stdin: + input = EchoingStdin(input, bytes_output) + input = io.TextIOWrapper(input, encoding=self.charset) + sys.stdout = io.TextIOWrapper( + bytes_output, encoding=self.charset) + if not self.mix_stderr: + bytes_error = io.BytesIO() + sys.stderr = io.TextIOWrapper( + bytes_error, encoding=self.charset) + + if self.mix_stderr: + sys.stderr = sys.stdout + + sys.stdin = input + + def visible_input(prompt=None): + sys.stdout.write(prompt or '') + val = input.readline().rstrip('\r\n') + sys.stdout.write(val + '\n') + sys.stdout.flush() + return val + + def hidden_input(prompt=None): + sys.stdout.write((prompt or '') + '\n') + sys.stdout.flush() + return input.readline().rstrip('\r\n') + + def _getchar(echo): + char = sys.stdin.read(1) + if echo: + sys.stdout.write(char) + sys.stdout.flush() + return char + + default_color = color + + def should_strip_ansi(stream=None, color=None): + if color is None: + return not default_color + return not color + + old_visible_prompt_func = clickpkg.termui.visible_prompt_func + old_hidden_prompt_func = clickpkg.termui.hidden_prompt_func + old__getchar_func = clickpkg.termui._getchar + old_should_strip_ansi = clickpkg.utils.should_strip_ansi + clickpkg.termui.visible_prompt_func = visible_input + clickpkg.termui.hidden_prompt_func = hidden_input + clickpkg.termui._getchar = _getchar + clickpkg.utils.should_strip_ansi = should_strip_ansi + + old_env = {} + try: + for key, value in iteritems(env): + old_env[key] = os.environ.get(key) + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + yield (bytes_output, not self.mix_stderr and bytes_error) + finally: + for key, value in iteritems(old_env): + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + sys.stdout = old_stdout + sys.stderr = old_stderr + sys.stdin = old_stdin + clickpkg.termui.visible_prompt_func = old_visible_prompt_func + clickpkg.termui.hidden_prompt_func = old_hidden_prompt_func + clickpkg.termui._getchar = old__getchar_func + clickpkg.utils.should_strip_ansi = old_should_strip_ansi + clickpkg.formatting.FORCED_WIDTH = old_forced_width + + def invoke(self, cli, args=None, input=None, env=None, + catch_exceptions=True, color=False, mix_stderr=False, **extra): + """Invokes a command in an isolated environment. The arguments are + forwarded directly to the command line script, the `extra` keyword + arguments are passed to the :meth:`~clickpkg.Command.main` function of + the command. + + This returns a :class:`Result` object. + + .. versionadded:: 3.0 + The ``catch_exceptions`` parameter was added. + + .. versionchanged:: 3.0 + The result object now has an `exc_info` attribute with the + traceback if available. + + .. versionadded:: 4.0 + The ``color`` parameter was added. + + :param cli: the command to invoke + :param args: the arguments to invoke. It may be given as an iterable + or a string. When given as string it will be interpreted + as a Unix shell command. 
More details at + :func:`shlex.split`. + :param input: the input data for `sys.stdin`. + :param env: the environment overrides. + :param catch_exceptions: Whether to catch any other exceptions than + ``SystemExit``. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + """ + exc_info = None + with self.isolation(input=input, env=env, color=color) as outstreams: + exception = None + exit_code = 0 + + if isinstance(args, string_types): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + exit_code = e.code + if exit_code is None: + exit_code = 0 + + if exit_code != 0: + exception = e + + if not isinstance(exit_code, int): + sys.stdout.write(str(exit_code)) + sys.stdout.write('\n') + exit_code = 1 + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + stdout = outstreams[0].getvalue() + stderr = outstreams[1] and outstreams[1].getvalue() + + return Result(runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + exit_code=exit_code, + exception=exception, + exc_info=exc_info) + + @contextlib.contextmanager + def isolated_filesystem(self): + """A context manager that creates a temporary folder and changes + the current working directory to it for isolated filesystem tests. + """ + cwd = os.getcwd() + t = tempfile.mkdtemp() + os.chdir(t) + try: + yield t + finally: + os.chdir(cwd) + try: + shutil.rmtree(t) + except (OSError, IOError): + pass diff --git a/py3/lib/python3.6/site-packages/click/types.py b/py3/lib/python3.6/site-packages/click/types.py new file mode 100644 index 0000000..1f88032 --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/types.py @@ -0,0 +1,668 @@ +import os +import stat +from datetime import datetime + +from ._compat import open_stream, text_type, filename_to_ui, \ + get_filesystem_encoding, get_streerror, _get_argv_encoding, PY2 +from .exceptions import BadParameter +from .utils import safecall, LazyFile + + +class ParamType(object): + """Helper for converting values through types. The following is + necessary for a valid type: + + * it needs a name + * it needs to pass through None unchanged + * it needs to convert from a string + * it needs to convert its result type through unchanged + (eg: needs to be idempotent) + * it needs to be able to deal with param and context being `None`. + This can be the case when the object is used with prompt + inputs. + """ + is_composite = False + + #: the descriptive name of this type + name = None + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). 
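Editor's note: the usual testing pattern built from invoke() and isolated_filesystem() as defined above; the hello command here is a made-up example, not part of this change.

import click
from click.testing import CliRunner

@click.command()
@click.argument('name')
def hello(name):
    click.echo('Hello %s!' % name)

runner = CliRunner()
with runner.isolated_filesystem():
    # runs the command in a temporary directory with captured streams
    result = runner.invoke(hello, ['World'])

assert result.exit_code == 0
assert result.output == 'Hello World!\n'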
+ envvar_list_splitter = None + + def __call__(self, value, param=None, ctx=None): + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param): + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param): + """Optionally might return extra information about a missing + parameter. + + .. versionadded:: 2.0 + """ + + def convert(self, value, param, ctx): + """Converts the value. This is not invoked for values that are + `None` (the missing value). + """ + return value + + def split_envvar_value(self, rv): + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or '').split(self.envvar_list_splitter) + + def fail(self, message, param=None, ctx=None): + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + +class CompositeParamType(ParamType): + is_composite = True + + @property + def arity(self): + raise NotImplementedError() + + +class FuncParamType(ParamType): + + def __init__(self, func): + self.name = func.__name__ + self.func = func + + def convert(self, value, param, ctx): + try: + return self.func(value) + except ValueError: + try: + value = text_type(value) + except UnicodeError: + value = str(value).decode('utf-8', 'replace') + self.fail(value, param, ctx) + + +class UnprocessedParamType(ParamType): + name = 'text' + + def convert(self, value, param, ctx): + return value + + def __repr__(self): + return 'UNPROCESSED' + + +class StringParamType(ParamType): + name = 'text' + + def convert(self, value, param, ctx): + if isinstance(value, bytes): + enc = _get_argv_encoding() + try: + value = value.decode(enc) + except UnicodeError: + fs_enc = get_filesystem_encoding() + if fs_enc != enc: + try: + value = value.decode(fs_enc) + except UnicodeError: + value = value.decode('utf-8', 'replace') + return value + return value + + def __repr__(self): + return 'STRING' + + +class Choice(ParamType): + """The choice type allows a value to be checked against a fixed set + of supported values. All of these values have to be strings. + + You should only pass a list or tuple of choices. Other iterables + (like generators) may lead to surprising results. + + See :ref:`choice-opts` for an example. + + :param case_sensitive: Set to false to make choices case + insensitive. Defaults to true. + """ + + name = 'choice' + + def __init__(self, choices, case_sensitive=True): + self.choices = choices + self.case_sensitive = case_sensitive + + def get_metavar(self, param): + return '[%s]' % '|'.join(self.choices) + + def get_missing_message(self, param): + return 'Choose from:\n\t%s.' 
% ',\n\t'.join(self.choices) + + def convert(self, value, param, ctx): + # Exact match + if value in self.choices: + return value + + # Match through normalization and case sensitivity + # first do token_normalize_func, then lowercase + # preserve original `value` to produce an accurate message in + # `self.fail` + normed_value = value + normed_choices = self.choices + + if ctx is not None and \ + ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(value) + normed_choices = [ctx.token_normalize_func(choice) for choice in + self.choices] + + if not self.case_sensitive: + normed_value = normed_value.lower() + normed_choices = [choice.lower() for choice in normed_choices] + + if normed_value in normed_choices: + return normed_value + + self.fail('invalid choice: %s. (choose from %s)' % + (value, ', '.join(self.choices)), param, ctx) + + def __repr__(self): + return 'Choice(%r)' % list(self.choices) + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. + + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. + """ + name = 'datetime' + + def __init__(self, formats=None): + self.formats = formats or [ + '%Y-%m-%d', + '%Y-%m-%dT%H:%M:%S', + '%Y-%m-%d %H:%M:%S' + ] + + def get_metavar(self, param): + return '[{}]'.format('|'.join(self.formats)) + + def _try_to_convert_date(self, value, format): + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert(self, value, param, ctx): + # Exact match + for format in self.formats: + dtime = self._try_to_convert_date(value, format) + if dtime: + return dtime + + self.fail( + 'invalid datetime format: {}. (choose from {})'.format( + value, ', '.join(self.formats))) + + def __repr__(self): + return 'DateTime' + + +class IntParamType(ParamType): + name = 'integer' + + def convert(self, value, param, ctx): + try: + return int(value) + except (ValueError, UnicodeError): + self.fail('%s is not a valid integer' % value, param, ctx) + + def __repr__(self): + return 'INT' + + +class IntRange(IntParamType): + """A parameter that works similar to :data:`click.INT` but restricts + the value to fit into a range. The default behavior is to fail if the + value falls outside the range, but it can also be silently clamped + between the two edges. + + See :ref:`ranges` for an example. 
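Editor's note: an illustrative command using the Choice and DateTime parameter types shown above; the command and option names are invented for the example.

import click

@click.command()
@click.option('--level', type=click.Choice(['debug', 'info', 'warn'],
                                           case_sensitive=False))
@click.option('--since', type=click.DateTime(formats=['%Y-%m-%d']))
def logs(level, since):
    # `level` is normalized against the choices, `since` is a datetime object
    click.echo('%s since %s' % (level, since))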
+ """ + name = 'integer range' + + def __init__(self, min=None, max=None, clamp=False): + self.min = min + self.max = max + self.clamp = clamp + + def convert(self, value, param, ctx): + rv = IntParamType.convert(self, value, param, ctx) + if self.clamp: + if self.min is not None and rv < self.min: + return self.min + if self.max is not None and rv > self.max: + return self.max + if self.min is not None and rv < self.min or \ + self.max is not None and rv > self.max: + if self.min is None: + self.fail('%s is bigger than the maximum valid value ' + '%s.' % (rv, self.max), param, ctx) + elif self.max is None: + self.fail('%s is smaller than the minimum valid value ' + '%s.' % (rv, self.min), param, ctx) + else: + self.fail('%s is not in the valid range of %s to %s.' + % (rv, self.min, self.max), param, ctx) + return rv + + def __repr__(self): + return 'IntRange(%r, %r)' % (self.min, self.max) + + +class FloatParamType(ParamType): + name = 'float' + + def convert(self, value, param, ctx): + try: + return float(value) + except (UnicodeError, ValueError): + self.fail('%s is not a valid floating point value' % + value, param, ctx) + + def __repr__(self): + return 'FLOAT' + + +class FloatRange(FloatParamType): + """A parameter that works similar to :data:`click.FLOAT` but restricts + the value to fit into a range. The default behavior is to fail if the + value falls outside the range, but it can also be silently clamped + between the two edges. + + See :ref:`ranges` for an example. + """ + name = 'float range' + + def __init__(self, min=None, max=None, clamp=False): + self.min = min + self.max = max + self.clamp = clamp + + def convert(self, value, param, ctx): + rv = FloatParamType.convert(self, value, param, ctx) + if self.clamp: + if self.min is not None and rv < self.min: + return self.min + if self.max is not None and rv > self.max: + return self.max + if self.min is not None and rv < self.min or \ + self.max is not None and rv > self.max: + if self.min is None: + self.fail('%s is bigger than the maximum valid value ' + '%s.' % (rv, self.max), param, ctx) + elif self.max is None: + self.fail('%s is smaller than the minimum valid value ' + '%s.' % (rv, self.min), param, ctx) + else: + self.fail('%s is not in the valid range of %s to %s.' + % (rv, self.min, self.max), param, ctx) + return rv + + def __repr__(self): + return 'FloatRange(%r, %r)' % (self.min, self.max) + + +class BoolParamType(ParamType): + name = 'boolean' + + def convert(self, value, param, ctx): + if isinstance(value, bool): + return bool(value) + value = value.lower() + if value in ('true', 't', '1', 'yes', 'y'): + return True + elif value in ('false', 'f', '0', 'no', 'n'): + return False + self.fail('%s is not a valid boolean' % value, param, ctx) + + def __repr__(self): + return 'BOOL' + + +class UUIDParameterType(ParamType): + name = 'uuid' + + def convert(self, value, param, ctx): + import uuid + try: + if PY2 and isinstance(value, text_type): + value = value.encode('ascii') + return uuid.UUID(value) + except (UnicodeError, ValueError): + self.fail('%s is not a valid UUID value' % value, param, ctx) + + def __repr__(self): + return 'UUID' + + +class File(ParamType): + """Declares a parameter to be a file for reading or writing. The file + is automatically closed once the context tears down (after the command + finished working). + + Files can be opened for reading or writing. The special value ``-`` + indicates stdin or stdout depending on the mode. 
+ + By default, the file is opened for reading text data, but it can also be + opened in binary mode or for writing. The encoding parameter can be used + to force a specific encoding. + + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. + + Starting with Click 2.0, files can also be opened atomically in which + case all writes go into a separate file in the same folder and upon + completion the file will be moved over to the original location. This + is useful if a file regularly read by other users is modified. + + See :ref:`file-args` for more information. + """ + name = 'filename' + envvar_list_splitter = os.path.pathsep + + def __init__(self, mode='r', encoding=None, errors='strict', lazy=None, + atomic=False): + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def resolve_lazy_flag(self, value): + if self.lazy is not None: + return self.lazy + if value == '-': + return False + elif 'w' in self.mode: + return True + return False + + def convert(self, value, param, ctx): + try: + if hasattr(value, 'read') or hasattr(value, 'write'): + return value + + lazy = self.resolve_lazy_flag(value) + + if lazy: + f = LazyFile(value, self.mode, self.encoding, self.errors, + atomic=self.atomic) + if ctx is not None: + ctx.call_on_close(f.close_intelligently) + return f + + f, should_close = open_stream(value, self.mode, + self.encoding, self.errors, + atomic=self.atomic) + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + return f + except (IOError, OSError) as e: + self.fail('Could not open file: %s: %s' % ( + filename_to_ui(value), + get_streerror(e), + ), param, ctx) + + +class Path(ParamType): + """The path type is similar to the :class:`File` type but it performs + different checks. First of all, instead of returning an open file + handle it returns just the filename. Secondly, it can perform various + basic checks about what the file or directory should be. + + .. versionchanged:: 6.0 + `allow_dash` was added. + + :param exists: if set to true, the file or directory needs to exist for + this value to be valid. If this is not required and a + file does indeed not exist, then all further checks are + silently skipped. + :param file_okay: controls if a file is a possible value. + :param dir_okay: controls if a directory is a possible value. + :param writable: if true, a writable check is performed. + :param readable: if true, a readable check is performed. + :param resolve_path: if this is true, then the path is fully resolved + before the value is passed onwards. This means + that it's absolute and symlinks are resolved. It + will not expand a tilde-prefix, as this is + supposed to be done by the shell only. 
+ :param allow_dash: If this is set to `True`, a single dash to indicate + standard streams is permitted. + :param path_type: optionally a string type that should be used to + represent the path. The default is `None` which + means the return value will be either bytes or + unicode depending on what makes most sense given the + input data Click deals with. + """ + envvar_list_splitter = os.path.pathsep + + def __init__(self, exists=False, file_okay=True, dir_okay=True, + writable=False, readable=True, resolve_path=False, + allow_dash=False, path_type=None): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.writable = writable + self.readable = readable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name = 'file' + self.path_type = 'File' + elif self.dir_okay and not self.file_okay: + self.name = 'directory' + self.path_type = 'Directory' + else: + self.name = 'path' + self.path_type = 'Path' + + def coerce_path_result(self, rv): + if self.type is not None and not isinstance(rv, self.type): + if self.type is text_type: + rv = rv.decode(get_filesystem_encoding()) + else: + rv = rv.encode(get_filesystem_encoding()) + return rv + + def convert(self, value, param, ctx): + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b'-', '-') + + if not is_dash: + if self.resolve_path: + rv = os.path.realpath(rv) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail('%s "%s" does not exist.' % ( + self.path_type, + filename_to_ui(value) + ), param, ctx) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail('%s "%s" is a file.' % ( + self.path_type, + filename_to_ui(value) + ), param, ctx) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail('%s "%s" is a directory.' % ( + self.path_type, + filename_to_ui(value) + ), param, ctx) + if self.writable and not os.access(value, os.W_OK): + self.fail('%s "%s" is not writable.' % ( + self.path_type, + filename_to_ui(value) + ), param, ctx) + if self.readable and not os.access(value, os.R_OK): + self.fail('%s "%s" is not readable.' % ( + self.path_type, + filename_to_ui(value) + ), param, ctx) + + return self.coerce_path_result(rv) + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. + """ + + def __init__(self, types): + self.types = [convert_type(ty) for ty in types] + + @property + def name(self): + return "<" + " ".join(ty.name for ty in self.types) + ">" + + @property + def arity(self): + return len(self.types) + + def convert(self, value, param, ctx): + if len(value) != len(self.types): + raise TypeError('It would appear that nargs is set to conflict ' + 'with the composite type arity.') + return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) + + +def convert_type(ty, default=None): + """Converts a callable or python ty into the most appropriate param + ty. 
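Editor's note: an illustrative command combining the Path type and the Tuple composite type described above; names and defaults are made up for the example.

import click

@click.command()
@click.argument('config', type=click.Path(exists=True, dir_okay=False))
@click.option('--size', type=(int, int), default=(80, 24))  # becomes a Tuple
def render(config, size):
    width, height = size
    click.echo('%s rendered at %dx%d' % (config, width, height))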
+ """ + guessed_type = False + if ty is None and default is not None: + if isinstance(default, tuple): + ty = tuple(map(type, default)) + else: + ty = type(default) + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + if isinstance(ty, ParamType): + return ty + if ty is text_type or ty is str or ty is None: + return STRING + if ty is int: + return INT + # Booleans are only okay if not guessed. This is done because for + # flags the default value is actually a bit of a lie in that it + # indicates which of the flags is the one we want. See get_default() + # for more information. + if ty is bool and not guessed_type: + return BOOL + if ty is float: + return FLOAT + if guessed_type: + return STRING + + # Catch a common mistake + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError('Attempted to use an uninstantiated ' + 'parameter type (%s).' % ty) + except TypeError: + pass + return FuncParamType(ty) + + +#: A dummy parameter type that just does nothing. From a user's +#: perspective this appears to just be the same as `STRING` but internally +#: no string conversion takes place. This is necessary to achieve the +#: same bytes/unicode behavior on Python 2/3 in situations where you want +#: to not convert argument types. This is usually useful when working +#: with file paths as they can appear in bytes and unicode. +#: +#: For path related uses the :class:`Path` type is a better choice but +#: there are situations where an unprocessed type is useful which is why +#: it is is provided. +#: +#: .. versionadded:: 4.0 +UNPROCESSED = UnprocessedParamType() + +#: A unicode string parameter type which is the implicit default. This +#: can also be selected by using ``str`` as type. +STRING = StringParamType() + +#: An integer parameter. This can also be selected by using ``int`` as +#: type. +INT = IntParamType() + +#: A floating point value parameter. This can also be selected by using +#: ``float`` as type. +FLOAT = FloatParamType() + +#: A boolean parameter. This is the default for boolean flags. This can +#: also be selected by using ``bool`` as a type. +BOOL = BoolParamType() + +#: A UUID parameter. 
+UUID = UUIDParameterType() diff --git a/py3/lib/python3.6/site-packages/click/utils.py b/py3/lib/python3.6/site-packages/click/utils.py new file mode 100644 index 0000000..fc84369 --- /dev/null +++ b/py3/lib/python3.6/site-packages/click/utils.py @@ -0,0 +1,440 @@ +import os +import sys + +from .globals import resolve_color_default + +from ._compat import text_type, open_stream, get_filesystem_encoding, \ + get_streerror, string_types, PY2, binary_streams, text_streams, \ + filename_to_ui, auto_wrap_for_ansi, strip_ansi, should_strip_ansi, \ + _default_text_stdout, _default_text_stderr, is_bytes, WIN + +if not PY2: + from ._compat import _find_binary_writer +elif WIN: + from ._winconsole import _get_windows_argv, \ + _hash_py_argv, _initial_argv_hash + + +echo_native_types = string_types + (bytes, bytearray) + + +def _posixify(name): + return '-'.join(name.split()).lower() + + +def safecall(func): + """Wraps a function so that it swallows exceptions.""" + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except Exception: + pass + return wrapper + + +def make_str(value): + """Converts a value into a valid string.""" + if isinstance(value, bytes): + try: + return value.decode(get_filesystem_encoding()) + except UnicodeError: + return value.decode('utf-8', 'replace') + return text_type(value) + + +def make_default_short_help(help, max_length=45): + """Return a condensed version of help string.""" + words = help.split() + total_length = 0 + result = [] + done = False + + for word in words: + if word[-1:] == '.': + done = True + new_length = result and 1 + len(word) or len(word) + if total_length + new_length > max_length: + result.append('...') + done = True + else: + if result: + result.append(' ') + result.append(word) + if done: + break + total_length += new_length + + return ''.join(result) + + +class LazyFile(object): + """A lazy file works like a regular file but it does not fully open + the file but it does perform some basic checks early to see if the + filename parameter does make sense. This is useful for safely opening + files for writing. + """ + + def __init__(self, filename, mode='r', encoding=None, errors='strict', + atomic=False): + self.name = filename + self.mode = mode + self.encoding = encoding + self.errors = errors + self.atomic = atomic + + if filename == '-': + self._f, self.should_close = open_stream(filename, mode, + encoding, errors) + else: + if 'r' in mode: + # Open and close the file in case we're opening it for + # reading so that we can catch at least some errors in + # some cases early. + open(filename, mode).close() + self._f = None + self.should_close = True + + def __getattr__(self, name): + return getattr(self.open(), name) + + def __repr__(self): + if self._f is not None: + return repr(self._f) + return '' % (self.name, self.mode) + + def open(self): + """Opens the file if it's not yet open. This call might fail with + a :exc:`FileError`. Not handling this error will produce an error + that Click shows. 
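Editor's note: a minimal sketch of the LazyFile wrapper defined above, used for lazy writing; "report.txt" is an arbitrary example path and LazyFile is normally used internally by the File type.

from click.utils import LazyFile

out = LazyFile('report.txt', mode='w')
out.write('first line\n')     # first attribute access triggers the real open()
out.close_intelligently()     # closes because the wrapper opened the file itself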
+ """ + if self._f is not None: + return self._f + try: + rv, self.should_close = open_stream(self.name, self.mode, + self.encoding, + self.errors, + atomic=self.atomic) + except (IOError, OSError) as e: + from .exceptions import FileError + raise FileError(self.name, hint=get_streerror(e)) + self._f = rv + return rv + + def close(self): + """Closes the underlying file, no matter what.""" + if self._f is not None: + self._f.close() + + def close_intelligently(self): + """This function only closes the file if it was opened by the lazy + file wrapper. For instance this will never close stdin. + """ + if self.should_close: + self.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + self.close_intelligently() + + def __iter__(self): + self.open() + return iter(self._f) + + +class KeepOpenFile(object): + + def __init__(self, file): + self._file = file + + def __getattr__(self, name): + return getattr(self._file, name) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + pass + + def __repr__(self): + return repr(self._file) + + def __iter__(self): + return iter(self._file) + + +def echo(message=None, file=None, nl=True, err=False, color=None): + """Prints a message plus a newline to the given file or stdout. On + first sight, this looks like the print function, but it has improved + support for handling Unicode and binary data that does not fail no + matter how badly configured the system is. + + Primarily it means that you can print binary data as well as Unicode + data on both 2.x and 3.x to the given file in the most appropriate way + possible. This is a very carefree function in that it will try its + best to not fail. As of Click 6.0 this includes support for unicode + output on the Windows console. + + In addition to that, if `colorama`_ is installed, the echo function will + also support clever handling of ANSI codes. Essentially it will then + do the following: + + - add transparent handling of ANSI color codes on Windows. + - hide ANSI codes automatically if the destination file is not a + terminal. + + .. _colorama: https://pypi.org/project/colorama/ + + .. versionchanged:: 6.0 + As of Click 6.0 the echo function will properly support unicode + output on the windows console. Not that click does not modify + the interpreter in any way which means that `sys.stdout` or the + print statement or function will still not provide unicode support. + + .. versionchanged:: 2.0 + Starting with version 2.0 of Click, the echo function will work + with colorama if it's installed. + + .. versionadded:: 3.0 + The `err` parameter was added. + + .. versionchanged:: 4.0 + Added the `color` flag. + + :param message: the message to print + :param file: the file to write to (defaults to ``stdout``) + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``. This is faster and easier than calling + :func:`get_text_stderr` yourself. + :param nl: if set to `True` (the default) a newline is printed afterwards. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # Convert non bytes/text into the native string type. 
+ if message is not None and not isinstance(message, echo_native_types): + message = text_type(message) + + if nl: + message = message or u'' + if isinstance(message, text_type): + message += u'\n' + else: + message += b'\n' + + # If there is a message, and we're in Python 3, and the value looks + # like bytes, we manually need to find the binary stream and write the + # message in there. This is done separately so that most stream + # types will work as you would expect. Eg: you can write to StringIO + # for other cases. + if message and not PY2 and is_bytes(message): + binary_file = _find_binary_writer(file) + if binary_file is not None: + file.flush() + binary_file.write(message) + binary_file.flush() + return + + # ANSI-style support. If there is no message or we are dealing with + # bytes nothing is happening. If we are connected to a file we want + # to strip colors. If we are on windows we either wrap the stream + # to strip the color or we use the colorama support to translate the + # ansi codes to API calls. + if message and not is_bytes(message): + color = resolve_color_default(color) + if should_strip_ansi(file, color): + message = strip_ansi(message) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file) + elif not color: + message = strip_ansi(message) + + if message: + file.write(message) + file.flush() + + +def get_binary_stream(name): + """Returns a system stream for byte processing. This essentially + returns the stream from the sys module with the given name but it + solves some compatibility issues between different Python versions. + Primarily this function is necessary for getting binary streams on + Python 3. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError('Unknown standard stream %r' % name) + return opener() + + +def get_text_stream(name, encoding=None, errors='strict'): + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts on Python 3 + for already correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. + """ + opener = text_streams.get(name) + if opener is None: + raise TypeError('Unknown standard stream %r' % name) + return opener(encoding, errors) + + +def open_file(filename, mode='r', encoding=None, errors='strict', + lazy=False, atomic=False): + """This is similar to how the :class:`File` works but for manual + usage. Files are opened non lazy by default. This can open regular + files as well as stdin/stdout if ``'-'`` is passed. + + If stdin/stdout is returned the stream is wrapped so that the context + manager will not close the stream accidentally. This makes it possible + to always use the function like this without having to worry to + accidentally close a standard stream:: + + with open_file(filename) as f: + ... + + .. versionadded:: 3.0 + + :param filename: the name of the file to open (or ``'-'`` for stdin/stdout). + :param mode: the mode in which to open the file. + :param encoding: the encoding to use. + :param errors: the error handling for this file. + :param lazy: can be flipped to true to open the file lazily. 
+ :param atomic: in atomic mode writes go into a temporary file and it's + moved on close. + """ + if lazy: + return LazyFile(filename, mode, encoding, errors, atomic=atomic) + f, should_close = open_stream(filename, mode, encoding, errors, + atomic=atomic) + if not should_close: + f = KeepOpenFile(f) + return f + + +def get_os_args(): + """This returns the argument part of sys.argv in the most appropriate + form for processing. What this means is that this return value is in + a format that works for Click to process but does not necessarily + correspond well to what's actually standard for the interpreter. + + On most environments the return value is ``sys.argv[:1]`` unchanged. + However if you are on Windows and running Python 2 the return value + will actually be a list of unicode strings instead because the + default behavior on that platform otherwise will not be able to + carry all possible values that sys.argv can have. + + .. versionadded:: 6.0 + """ + # We can only extract the unicode argv if sys.argv has not been + # changed since the startup of the application. + if PY2 and WIN and _initial_argv_hash == _hash_py_argv(): + return _get_windows_argv() + return sys.argv[1:] + + +def format_filename(filename, shorten=False): + """Formats a filename for user display. The main purpose of this + function is to ensure that the filename can be displayed at all. This + will decode the filename to unicode if necessary in a way that it will + not fail. Optionally, it can shorten the filename to not include the + full path to the filename. + + :param filename: formats a filename for UI display. This will also convert + the filename into unicode without failing. + :param shorten: this optionally shortens the filename to strip of the + path that leads up to it. + """ + if shorten: + filename = os.path.basename(filename) + return filename_to_ui(filename) + + +def get_app_dir(app_name, roaming=True, force_posix=False): + r"""Returns the config folder for the application. The default behavior + is to return whatever is most appropriate for the operating system. + + To give you an idea, for an app called ``"Foo Bar"``, something like + the following folders could be returned: + + Mac OS X: + ``~/Library/Application Support/Foo Bar`` + Mac OS X (POSIX): + ``~/.foo-bar`` + Unix: + ``~/.config/foo-bar`` + Unix (POSIX): + ``~/.foo-bar`` + Win XP (roaming): + ``C:\Documents and Settings\\Local Settings\Application Data\Foo Bar`` + Win XP (not roaming): + ``C:\Documents and Settings\\Application Data\Foo Bar`` + Win 7 (roaming): + ``C:\Users\\AppData\Roaming\Foo Bar`` + Win 7 (not roaming): + ``C:\Users\\AppData\Local\Foo Bar`` + + .. versionadded:: 2.0 + + :param app_name: the application name. This should be properly capitalized + and can contain whitespace. + :param roaming: controls if the folder should be roaming or not on Windows. + Has no affect otherwise. + :param force_posix: if this is set to `True` then on any POSIX system the + folder will be stored in the home folder with a leading + dot instead of the XDG config home or darwin's + application support folder. + """ + if WIN: + key = roaming and 'APPDATA' or 'LOCALAPPDATA' + folder = os.environ.get(key) + if folder is None: + folder = os.path.expanduser('~') + return os.path.join(folder, app_name) + if force_posix: + return os.path.join(os.path.expanduser('~/.' 
+ _posixify(app_name))) + if sys.platform == 'darwin': + return os.path.join(os.path.expanduser( + '~/Library/Application Support'), app_name) + return os.path.join( + os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')), + _posixify(app_name)) + + +class PacifyFlushWrapper(object): + """This wrapper is used to catch and suppress BrokenPipeErrors resulting + from ``.flush()`` being called on broken pipe during the shutdown/final-GC + of the Python interpreter. Notably ``.flush()`` is always called on + ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any + other cleanup code, and the case where the underlying file is not a broken + pipe, all calls and attributes are proxied. + """ + + def __init__(self, wrapped): + self.wrapped = wrapped + + def flush(self): + try: + self.wrapped.flush() + except IOError as e: + import errno + if e.errno != errno.EPIPE: + raise + + def __getattr__(self, attr): + return getattr(self.wrapped, attr) diff --git a/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/METADATA b/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/METADATA new file mode 100644 index 0000000..e7b9e5a --- /dev/null +++ b/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/METADATA @@ -0,0 +1,92 @@ +Metadata-Version: 2.1 +Name: dataclasses +Version: 0.6 +Summary: A backport of the dataclasses module for Python 3.6 +Home-page: https://github.com/ericvsmith/dataclasses +Author: Eric V. Smith +Author-email: eric@python.org +License: Apache +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3.6 + +.. image:: https://img.shields.io/pypi/v/dataclasses.svg + + +This is an implementation of PEP 557, Data Classes. It is a backport +for Python 3.6. Because dataclasses will be included in Python 3.7, +any discussion of dataclass features should occur on the python-dev +mailing list at https://mail.python.org/mailman/listinfo/python-dev. +At this point this repo should only be used for historical purposes +(it's where the original dataclasses discussions took place) and for +discussion of the actual backport to Python 3.6. + +See https://www.python.org/dev/peps/pep-0557/ for the details of how +Data Classes work. + +A test file can be found at +https://github.com/ericvsmith/dataclasses/blob/master/test_dataclasses.py, +or in the sdist file. + +Installation +------------- + +.. code-block:: + + pip install dataclasses + + +Example Usage +------------- + +.. code-block:: python + + from dataclasses import dataclass + + @dataclass + class InventoryItem: + name: str + unit_price: float + quantity_on_hand: int = 0 + + def total_cost(self) -> float: + return self.unit_price * self.quantity_on_hand + + item = InventoryItem('hammers', 10.49, 12) + print(item.total_cost()) + +Some additional tools can be found in dataclass_tools.py, included in +the sdist. + +Compatibility +------------- + +This backport assumes that dict objects retain their sort order. This +is true in the language spec for Python 3.7 and greater. 
Since this +is a backport to Python 3.6, it raises an interesting question: does +that guarantee apply to 3.6? For CPython 3.6 it does. As of the time +of this writing, it's also true for all other Python implementations +that claim to be 3.6 compatible, of which there are none. Any new +3.6 implementations are expected to have ordered dicts. See the +analysis at the end of this email: + +https://mail.python.org/pipermail/python-dev/2017-December/151325.html + +As of version 0.4, this code no longer works with Python 3.7. For 3.7, +use the built-in dataclasses module. + +Release History +--------------- + ++---------+------------+-------------------------------------+ +| Version | Date | Description | ++=========+============+=====================================+ +| 0.6 | 2018-05-17 | Equivalent to Python 3.7.0rc1 | ++---------+------------+-------------------------------------+ +| 0.5 | 2018-03-28 | Equivalent to Python 3.7.0b3 | ++---------+------------+-------------------------------------+ + + diff --git a/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/RECORD b/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/RECORD new file mode 100644 index 0000000..bd7c2c3 --- /dev/null +++ b/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/RECORD @@ -0,0 +1,7 @@ +dataclasses.py,sha256=WwkMl6vUvIeyaicLU90FcakhRJK9SyQUYTYP1-wre7s,45208 +dataclasses-0.6.dist-info/METADATA,sha256=VyUITm4sTFdq3a0uBGjF2lkZ0n64NEVrL7vJjxn8nFI,2989 +dataclasses-0.6.dist-info/RECORD,, +dataclasses-0.6.dist-info/WHEEL,sha256=FRuHuBX28RTgsOL603qIpbSc2WeDgbsFBWKj9XFOCpg,92 +dataclasses-0.6.dist-info/top_level.txt,sha256=g1h_lLdyfM-aL3-M-rWnNGxWv3ZmULp2HVvAlyZq5s0,12 +dataclasses-0.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +__pycache__/dataclasses.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/WHEEL b/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/WHEEL new file mode 100644 index 0000000..d1acba1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/top_level.txt new file mode 100644 index 0000000..a46627b --- /dev/null +++ b/py3/lib/python3.6/site-packages/dataclasses-0.6.dist-info/top_level.txt @@ -0,0 +1 @@ +dataclasses diff --git a/py3/lib/python3.6/site-packages/dataclasses.py b/py3/lib/python3.6/site-packages/dataclasses.py new file mode 100644 index 0000000..ba34f6b --- /dev/null +++ b/py3/lib/python3.6/site-packages/dataclasses.py @@ -0,0 +1,1177 @@ +import re +import sys +import copy +import types +import inspect +import keyword + +__all__ = ['dataclass', + 'field', + 'Field', + 'FrozenInstanceError', + 'InitVar', + 'MISSING', + + # Helper functions. + 'fields', + 'asdict', + 'astuple', + 'make_dataclass', + 'replace', + 'is_dataclass', + ] + +# Conditions for adding methods. The boxes indicate what action the +# dataclass decorator takes. For all of these tables, when I talk +# about init=, repr=, eq=, order=, unsafe_hash=, or frozen=, I'm +# referring to the arguments to the @dataclass decorator. When +# checking if a dunder method already exists, I mean check for an +# entry in the class's __dict__. I never check to see if an attribute +# is defined in a base class. 
+ +# Key: +# +=========+=========================================+ +# + Value | Meaning | +# +=========+=========================================+ +# | | No action: no method is added. | +# +---------+-----------------------------------------+ +# | add | Generated method is added. | +# +---------+-----------------------------------------+ +# | raise | TypeError is raised. | +# +---------+-----------------------------------------+ +# | None | Attribute is set to None. | +# +=========+=========================================+ + +# __init__ +# +# +--- init= parameter +# | +# v | | | +# | no | yes | <--- class has __init__ in __dict__? +# +=======+=======+=======+ +# | False | | | +# +-------+-------+-------+ +# | True | add | | <- the default +# +=======+=======+=======+ + +# __repr__ +# +# +--- repr= parameter +# | +# v | | | +# | no | yes | <--- class has __repr__ in __dict__? +# +=======+=======+=======+ +# | False | | | +# +-------+-------+-------+ +# | True | add | | <- the default +# +=======+=======+=======+ + + +# __setattr__ +# __delattr__ +# +# +--- frozen= parameter +# | +# v | | | +# | no | yes | <--- class has __setattr__ or __delattr__ in __dict__? +# +=======+=======+=======+ +# | False | | | <- the default +# +-------+-------+-------+ +# | True | add | raise | +# +=======+=======+=======+ +# Raise because not adding these methods would break the "frozen-ness" +# of the class. + +# __eq__ +# +# +--- eq= parameter +# | +# v | | | +# | no | yes | <--- class has __eq__ in __dict__? +# +=======+=======+=======+ +# | False | | | +# +-------+-------+-------+ +# | True | add | | <- the default +# +=======+=======+=======+ + +# __lt__ +# __le__ +# __gt__ +# __ge__ +# +# +--- order= parameter +# | +# v | | | +# | no | yes | <--- class has any comparison method in __dict__? +# +=======+=======+=======+ +# | False | | | <- the default +# +-------+-------+-------+ +# | True | add | raise | +# +=======+=======+=======+ +# Raise because to allow this case would interfere with using +# functools.total_ordering. + +# __hash__ + +# +------------------- unsafe_hash= parameter +# | +----------- eq= parameter +# | | +--- frozen= parameter +# | | | +# v v v | | | +# | no | yes | <--- class has explicitly defined __hash__ +# +=======+=======+=======+========+========+ +# | False | False | False | | | No __eq__, use the base class __hash__ +# +-------+-------+-------+--------+--------+ +# | False | False | True | | | No __eq__, use the base class __hash__ +# +-------+-------+-------+--------+--------+ +# | False | True | False | None | | <-- the default, not hashable +# +-------+-------+-------+--------+--------+ +# | False | True | True | add | | Frozen, so hashable, allows override +# +-------+-------+-------+--------+--------+ +# | True | False | False | add | raise | Has no __eq__, but hashable +# +-------+-------+-------+--------+--------+ +# | True | False | True | add | raise | Has no __eq__, but hashable +# +-------+-------+-------+--------+--------+ +# | True | True | False | add | raise | Not frozen, but hashable +# +-------+-------+-------+--------+--------+ +# | True | True | True | add | raise | Frozen, so hashable +# +=======+=======+=======+========+========+ +# For boxes that are blank, __hash__ is untouched and therefore +# inherited from the base class. If the base is object, then +# id-based hashing is used. +# +# Note that a class may already have __hash__=None if it specified an +# __eq__ method in the class body (not one that was created by +# @dataclass). 
+# +# See _hash_action (below) for a coded version of this table. + + +# Raised when an attempt is made to modify a frozen class. +class FrozenInstanceError(AttributeError): pass + +# A sentinel object for default values to signal that a default +# factory will be used. This is given a nice repr() which will appear +# in the function signature of dataclasses' constructors. +class _HAS_DEFAULT_FACTORY_CLASS: + def __repr__(self): + return '' +_HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS() + +# A sentinel object to detect if a parameter is supplied or not. Use +# a class to give it a better repr. +class _MISSING_TYPE: + pass +MISSING = _MISSING_TYPE() + +# Since most per-field metadata will be unused, create an empty +# read-only proxy that can be shared among all fields. +_EMPTY_METADATA = types.MappingProxyType({}) + +# Markers for the various kinds of fields and pseudo-fields. +class _FIELD_BASE: + def __init__(self, name): + self.name = name + def __repr__(self): + return self.name +_FIELD = _FIELD_BASE('_FIELD') +_FIELD_CLASSVAR = _FIELD_BASE('_FIELD_CLASSVAR') +_FIELD_INITVAR = _FIELD_BASE('_FIELD_INITVAR') + +# The name of an attribute on the class where we store the Field +# objects. Also used to check if a class is a Data Class. +_FIELDS = '__dataclass_fields__' + +# The name of an attribute on the class that stores the parameters to +# @dataclass. +_PARAMS = '__dataclass_params__' + +# The name of the function, that if it exists, is called at the end of +# __init__. +_POST_INIT_NAME = '__post_init__' + +# String regex that string annotations for ClassVar or InitVar must match. +# Allows "identifier.identifier[" or "identifier[". +# https://bugs.python.org/issue33453 for details. +_MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)') + +class _InitVarMeta(type): + def __getitem__(self, params): + return self + +class InitVar(metaclass=_InitVarMeta): + pass + + +# Instances of Field are only ever created from within this module, +# and only from the field() function, although Field instances are +# exposed externally as (conceptually) read-only objects. +# +# name and type are filled in after the fact, not in __init__. +# They're not known at the time this class is instantiated, but it's +# convenient if they're available later. +# +# When cls._FIELDS is filled in with a list of Field objects, the name +# and type fields will have been populated. +class Field: + __slots__ = ('name', + 'type', + 'default', + 'default_factory', + 'repr', + 'hash', + 'init', + 'compare', + 'metadata', + '_field_type', # Private: not to be used by user code. + ) + + def __init__(self, default, default_factory, init, repr, hash, compare, + metadata): + self.name = None + self.type = None + self.default = default + self.default_factory = default_factory + self.init = init + self.repr = repr + self.hash = hash + self.compare = compare + self.metadata = (_EMPTY_METADATA + if metadata is None or len(metadata) == 0 else + types.MappingProxyType(metadata)) + self._field_type = None + + def __repr__(self): + return ('Field(' + f'name={self.name!r},' + f'type={self.type!r},' + f'default={self.default!r},' + f'default_factory={self.default_factory!r},' + f'init={self.init!r},' + f'repr={self.repr!r},' + f'hash={self.hash!r},' + f'compare={self.compare!r},' + f'metadata={self.metadata!r},' + f'_field_type={self._field_type}' + ')') + + # This is used to support the PEP 487 __set_name__ protocol in the + # case where we're using a field that contains a descriptor as a + # defaul value. 
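Editor's note: a small sketch of the frozen= behaviour captured in the tables above; assignment goes through the generated __setattr__, which raises the FrozenInstanceError defined in this module.

from dataclasses import dataclass, FrozenInstanceError

@dataclass(frozen=True)
class Point:
    x: int
    y: int

p = Point(1, 2)
try:
    p.x = 5
except FrozenInstanceError:
    print('frozen instances reject attribute assignment')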
For details on __set_name__, see + # https://www.python.org/dev/peps/pep-0487/#implementation-details. + # + # Note that in _process_class, this Field object is overwritten + # with the default value, so the end result is a descriptor that + # had __set_name__ called on it at the right time. + def __set_name__(self, owner, name): + func = getattr(type(self.default), '__set_name__', None) + if func: + # There is a __set_name__ method on the descriptor, call + # it. + func(self.default, owner, name) + + +class _DataclassParams: + __slots__ = ('init', + 'repr', + 'eq', + 'order', + 'unsafe_hash', + 'frozen', + ) + + def __init__(self, init, repr, eq, order, unsafe_hash, frozen): + self.init = init + self.repr = repr + self.eq = eq + self.order = order + self.unsafe_hash = unsafe_hash + self.frozen = frozen + + def __repr__(self): + return ('_DataclassParams(' + f'init={self.init!r},' + f'repr={self.repr!r},' + f'eq={self.eq!r},' + f'order={self.order!r},' + f'unsafe_hash={self.unsafe_hash!r},' + f'frozen={self.frozen!r}' + ')') + + +# This function is used instead of exposing Field creation directly, +# so that a type checker can be told (via overloads) that this is a +# function whose type depends on its parameters. +def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True, + hash=None, compare=True, metadata=None): + """Return an object to identify dataclass fields. + + default is the default value of the field. default_factory is a + 0-argument function called to initialize a field's value. If init + is True, the field will be a parameter to the class's __init__() + function. If repr is True, the field will be included in the + object's repr(). If hash is True, the field will be included in + the object's hash(). If compare is True, the field will be used + in comparison functions. metadata, if specified, must be a + mapping which is stored but not otherwise examined by dataclass. + + It is an error to specify both default and default_factory. + """ + + if default is not MISSING and default_factory is not MISSING: + raise ValueError('cannot specify both default and default_factory') + return Field(default, default_factory, init, repr, hash, compare, + metadata) + + +def _tuple_str(obj_name, fields): + # Return a string representing each field of obj_name as a tuple + # member. So, if fields is ['x', 'y'] and obj_name is "self", + # return "(self.x,self.y)". + + # Special case for the 0-tuple. + if not fields: + return '()' + # Note the trailing comma, needed if this turns out to be a 1-tuple. + return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)' + + +def _create_fn(name, args, body, *, globals=None, locals=None, + return_type=MISSING): + # Note that we mutate locals when exec() is called. Caller + # beware! The only callers are internal to this module, so no + # worries about external callers. + if locals is None: + locals = {} + return_annotation = '' + if return_type is not MISSING: + locals['_return_type'] = return_type + return_annotation = '->_return_type' + args = ','.join(args) + body = '\n'.join(f' {b}' for b in body) + + # Compute the text of the entire function. + txt = f'def {name}({args}){return_annotation}:\n{body}' + + exec(txt, globals, locals) + return locals[name] + + +def _field_assign(frozen, name, value, self_name): + # If we're a frozen class, then assign to our fields in __init__ + # via object.__setattr__. Otherwise, just use a simple + # assignment. 
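Editor's note: a sketch of the field() helper documented above; the class and field names are invented. default_factory gives each instance its own list, and metadata is stored untouched on the Field object.

from dataclasses import dataclass, field

@dataclass
class Job:
    name: str
    tags: list = field(default_factory=list)
    priority: int = field(default=0, metadata={'unit': 'niceness'})

job = Job('backup')
job.tags.append('nightly')    # does not leak into other Job instances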
+ # + # self_name is what "self" is called in this function: don't + # hard-code "self", since that might be a field name. + if frozen: + return f'object.__setattr__({self_name},{name!r},{value})' + return f'{self_name}.{name}={value}' + + +def _field_init(f, frozen, globals, self_name): + # Return the text of the line in the body of __init__ that will + # initialize this field. + + default_name = f'_dflt_{f.name}' + if f.default_factory is not MISSING: + if f.init: + # This field has a default factory. If a parameter is + # given, use it. If not, call the factory. + globals[default_name] = f.default_factory + value = (f'{default_name}() ' + f'if {f.name} is _HAS_DEFAULT_FACTORY ' + f'else {f.name}') + else: + # This is a field that's not in the __init__ params, but + # has a default factory function. It needs to be + # initialized here by calling the factory function, + # because there's no other way to initialize it. + + # For a field initialized with a default=defaultvalue, the + # class dict just has the default value + # (cls.fieldname=defaultvalue). But that won't work for a + # default factory, the factory must be called in __init__ + # and we must assign that to self.fieldname. We can't + # fall back to the class dict's value, both because it's + # not set, and because it might be different per-class + # (which, after all, is why we have a factory function!). + + globals[default_name] = f.default_factory + value = f'{default_name}()' + else: + # No default factory. + if f.init: + if f.default is MISSING: + # There's no default, just do an assignment. + value = f.name + elif f.default is not MISSING: + globals[default_name] = f.default + value = f.name + else: + # This field does not need initialization. Signify that + # to the caller by returning None. + return None + + # Only test this now, so that we can create variables for the + # default. However, return None to signify that we're not going + # to actually do the assignment statement for InitVars. + if f._field_type == _FIELD_INITVAR: + return None + + # Now, actually generate the field assignment. + return _field_assign(frozen, f.name, value, self_name) + + +def _init_param(f): + # Return the __init__ parameter string for this field. For + # example, the equivalent of 'x:int=3' (except instead of 'int', + # reference a variable set to int, and instead of '3', reference a + # variable set to 3). + if f.default is MISSING and f.default_factory is MISSING: + # There's no default, and no default_factory, just output the + # variable name and type. + default = '' + elif f.default is not MISSING: + # There's a default, this will be the name that's used to look + # it up. + default = f'=_dflt_{f.name}' + elif f.default_factory is not MISSING: + # There's a factory function. Set a marker. + default = '=_HAS_DEFAULT_FACTORY' + return f'{f.name}:_type_{f.name}{default}' + + +def _init_fn(fields, frozen, has_post_init, self_name): + # fields contains both real fields and InitVar pseudo-fields. + + # Make sure we don't have fields without defaults following fields + # with defaults. This actually would be caught when exec-ing the + # function source code, but catching it here gives a better error + # message, and future-proofs us in case we build up the function + # using ast. + seen_default = False + for f in fields: + # Only consider fields in the __init__ call. 
+ if f.init: + if not (f.default is MISSING and f.default_factory is MISSING): + seen_default = True + elif seen_default: + raise TypeError(f'non-default argument {f.name!r} ' + 'follows default argument') + + globals = {'MISSING': MISSING, + '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY} + + body_lines = [] + for f in fields: + line = _field_init(f, frozen, globals, self_name) + # line is None means that this field doesn't require + # initialization (it's a pseudo-field). Just skip it. + if line: + body_lines.append(line) + + # Does this class have a post-init function? + if has_post_init: + params_str = ','.join(f.name for f in fields + if f._field_type is _FIELD_INITVAR) + body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str})') + + # If no body lines, use 'pass'. + if not body_lines: + body_lines = ['pass'] + + locals = {f'_type_{f.name}': f.type for f in fields} + return _create_fn('__init__', + [self_name] + [_init_param(f) for f in fields if f.init], + body_lines, + locals=locals, + globals=globals, + return_type=None) + + +def _repr_fn(fields): + return _create_fn('__repr__', + ('self',), + ['return self.__class__.__qualname__ + f"(' + + ', '.join([f"{f.name}={{self.{f.name}!r}}" + for f in fields]) + + ')"']) + + +def _frozen_get_del_attr(cls, fields): + # XXX: globals is modified on the first call to _create_fn, then + # the modified version is used in the second call. Is this okay? + globals = {'cls': cls, + 'FrozenInstanceError': FrozenInstanceError} + if fields: + fields_str = '(' + ','.join(repr(f.name) for f in fields) + ',)' + else: + # Special case for the zero-length tuple. + fields_str = '()' + return (_create_fn('__setattr__', + ('self', 'name', 'value'), + (f'if type(self) is cls or name in {fields_str}:', + ' raise FrozenInstanceError(f"cannot assign to field {name!r}")', + f'super(cls, self).__setattr__(name, value)'), + globals=globals), + _create_fn('__delattr__', + ('self', 'name'), + (f'if type(self) is cls or name in {fields_str}:', + ' raise FrozenInstanceError(f"cannot delete field {name!r}")', + f'super(cls, self).__delattr__(name)'), + globals=globals), + ) + + +def _cmp_fn(name, op, self_tuple, other_tuple): + # Create a comparison function. If the fields in the object are + # named 'x' and 'y', then self_tuple is the string + # '(self.x,self.y)' and other_tuple is the string + # '(other.x,other.y)'. + + return _create_fn(name, + ('self', 'other'), + [ 'if other.__class__ is self.__class__:', + f' return {self_tuple}{op}{other_tuple}', + 'return NotImplemented']) + + +def _hash_fn(fields): + self_tuple = _tuple_str('self', fields) + return _create_fn('__hash__', + ('self',), + [f'return hash({self_tuple})']) + + +def _is_classvar(a_type, typing): + # This test uses a typing internal class, but it's the best way to + # test if this is a ClassVar. + return type(a_type) is typing._ClassVar + + +def _is_initvar(a_type, dataclasses): + # The module we're checking against is the module we're + # currently in (dataclasses.py). + return a_type is dataclasses.InitVar + + +def _is_type(annotation, cls, a_module, a_type, is_type_predicate): + # Given a type annotation string, does it refer to a_type in + # a_module? For example, when checking that annotation denotes a + # ClassVar, then a_module is typing, and a_type is + # typing.ClassVar. + + # It's possible to look up a_module given a_type, but it involves + # looking in sys.modules (again!), and seems like a waste since + # the caller already knows a_module. 
+ + # - annotation is a string type annotation + # - cls is the class that this annotation was found in + # - a_module is the module we want to match + # - a_type is the type in that module we want to match + # - is_type_predicate is a function called with (obj, a_module) + # that determines if obj is of the desired type. + + # Since this test does not do a local namespace lookup (and + # instead only a module (global) lookup), there are some things it + # gets wrong. + + # With string annotations, cv0 will be detected as a ClassVar: + # CV = ClassVar + # @dataclass + # class C0: + # cv0: CV + + # But in this example cv1 will not be detected as a ClassVar: + # @dataclass + # class C1: + # CV = ClassVar + # cv1: CV + + # In C1, the code in this function (_is_type) will look up "CV" in + # the module and not find it, so it will not consider cv1 as a + # ClassVar. This is a fairly obscure corner case, and the best + # way to fix it would be to eval() the string "CV" with the + # correct global and local namespaces. However that would involve + # a eval() penalty for every single field of every dataclass + # that's defined. It was judged not worth it. + + match = _MODULE_IDENTIFIER_RE.match(annotation) + if match: + ns = None + module_name = match.group(1) + if not module_name: + # No module name, assume the class's module did + # "from dataclasses import InitVar". + ns = sys.modules.get(cls.__module__).__dict__ + else: + # Look up module_name in the class's module. + module = sys.modules.get(cls.__module__) + if module and module.__dict__.get(module_name) is a_module: + ns = sys.modules.get(a_type.__module__).__dict__ + if ns and is_type_predicate(ns.get(match.group(2)), a_module): + return True + return False + + +def _get_field(cls, a_name, a_type): + # Return a Field object for this field name and type. ClassVars + # and InitVars are also returned, but marked as such (see + # f._field_type). + + # If the default value isn't derived from Field, then it's only a + # normal default value. Convert it to a Field(). + default = getattr(cls, a_name, MISSING) + if isinstance(default, Field): + f = default + else: + if isinstance(default, types.MemberDescriptorType): + # This is a field in __slots__, so it has no default value. + default = MISSING + f = field(default=default) + + # Only at this point do we know the name and the type. Set them. + f.name = a_name + f.type = a_type + + # Assume it's a normal field until proven otherwise. We're next + # going to decide if it's a ClassVar or InitVar, everything else + # is just a normal field. + f._field_type = _FIELD + + # In addition to checking for actual types here, also check for + # string annotations. get_type_hints() won't always work for us + # (see https://github.com/python/typing/issues/508 for example), + # plus it's expensive and would require an eval for every stirng + # annotation. So, make a best effort to see if this is a ClassVar + # or InitVar using regex's and checking that the thing referenced + # is actually of the correct type. + + # For the complete discussion, see https://bugs.python.org/issue33453 + + # If typing has not been imported, then it's impossible for any + # annotation to be a ClassVar. So, only look for ClassVar if + # typing has been imported by any module (not necessarily cls's + # module). 
+ typing = sys.modules.get('typing') + if typing: + if (_is_classvar(a_type, typing) + or (isinstance(f.type, str) + and _is_type(f.type, cls, typing, typing.ClassVar, + _is_classvar))): + f._field_type = _FIELD_CLASSVAR + + # If the type is InitVar, or if it's a matching string annotation, + # then it's an InitVar. + if f._field_type is _FIELD: + # The module we're checking against is the module we're + # currently in (dataclasses.py). + dataclasses = sys.modules[__name__] + if (_is_initvar(a_type, dataclasses) + or (isinstance(f.type, str) + and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, + _is_initvar))): + f._field_type = _FIELD_INITVAR + + # Validations for individual fields. This is delayed until now, + # instead of in the Field() constructor, since only here do we + # know the field name, which allows for better error reporting. + + # Special restrictions for ClassVar and InitVar. + if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR): + if f.default_factory is not MISSING: + raise TypeError(f'field {f.name} cannot have a ' + 'default factory') + # Should I check for other field settings? default_factory + # seems the most serious to check for. Maybe add others. For + # example, how about init=False (or really, + # init=)? It makes no sense for + # ClassVar and InitVar to specify init=. + + # For real fields, disallow mutable defaults for known types. + if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)): + raise ValueError(f'mutable default {type(f.default)} for field ' + f'{f.name} is not allowed: use default_factory') + + return f + + +def _set_new_attribute(cls, name, value): + # Never overwrites an existing attribute. Returns True if the + # attribute already exists. + if name in cls.__dict__: + return True + setattr(cls, name, value) + return False + + +# Decide if/how we're going to create a hash function. Key is +# (unsafe_hash, eq, frozen, does-hash-exist). Value is the action to +# take. The common case is to do nothing, so instead of providing a +# function that is a no-op, use None to signify that. + +def _hash_set_none(cls, fields): + return None + +def _hash_add(cls, fields): + flds = [f for f in fields if (f.compare if f.hash is None else f.hash)] + return _hash_fn(flds) + +def _hash_exception(cls, fields): + # Raise an exception. + raise TypeError(f'Cannot overwrite attribute __hash__ ' + f'in class {cls.__name__}') + +# +# +-------------------------------------- unsafe_hash? +# | +------------------------------- eq? +# | | +------------------------ frozen? +# | | | +---------------- has-explicit-hash? +# | | | | +# | | | | +------- action +# | | | | | +# v v v v v +_hash_action = {(False, False, False, False): None, + (False, False, False, True ): None, + (False, False, True, False): None, + (False, False, True, True ): None, + (False, True, False, False): _hash_set_none, + (False, True, False, True ): None, + (False, True, True, False): _hash_add, + (False, True, True, True ): None, + (True, False, False, False): _hash_add, + (True, False, False, True ): _hash_exception, + (True, False, True, False): _hash_add, + (True, False, True, True ): _hash_exception, + (True, True, False, False): _hash_add, + (True, True, False, True ): _hash_exception, + (True, True, True, False): _hash_add, + (True, True, True, True ): _hash_exception, + } +# See https://bugs.python.org/issue32929#msg312829 for an if-statement +# version of this table. 
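+#
+# For example, a sketch of the lookup performed in _process_class below,
+# keyed by (bool(unsafe_hash), bool(eq), bool(frozen), has_explicit_hash):
+#
+#   _hash_action[False, True,  True,  False] is _hash_add         # frozen, eq, no explicit __hash__: generate one
+#   _hash_action[False, True,  False, False] is _hash_set_none    # eq only: set __hash__ = None
+#   _hash_action[True,  False, False, True ] is _hash_exception   # unsafe_hash over an explicit __hash__: error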
+ + +def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): + # Now that dicts retain insertion order, there's no reason to use + # an ordered dict. I am leveraging that ordering here, because + # derived class fields overwrite base class fields, but the order + # is defined by the base class, which is found first. + fields = {} + + setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order, + unsafe_hash, frozen)) + + # Find our base classes in reverse MRO order, and exclude + # ourselves. In reversed order so that more derived classes + # override earlier field definitions in base classes. As long as + # we're iterating over them, see if any are frozen. + any_frozen_base = False + has_dataclass_bases = False + for b in cls.__mro__[-1:0:-1]: + # Only process classes that have been processed by our + # decorator. That is, they have a _FIELDS attribute. + base_fields = getattr(b, _FIELDS, None) + if base_fields: + has_dataclass_bases = True + for f in base_fields.values(): + fields[f.name] = f + if getattr(b, _PARAMS).frozen: + any_frozen_base = True + + # Annotations that are defined in this class (not in base + # classes). If __annotations__ isn't present, then this class + # adds no new annotations. We use this to compute fields that are + # added by this class. + # + # Fields are found from cls_annotations, which is guaranteed to be + # ordered. Default values are from class attributes, if a field + # has a default. If the default value is a Field(), then it + # contains additional info beyond (and possibly including) the + # actual default value. Pseudo-fields ClassVars and InitVars are + # included, despite the fact that they're not real fields. That's + # dealt with later. + cls_annotations = cls.__dict__.get('__annotations__', {}) + + # Now find fields in our class. While doing so, validate some + # things, and set the default values (as class attributes) where + # we can. + cls_fields = [_get_field(cls, name, type) + for name, type in cls_annotations.items()] + for f in cls_fields: + fields[f.name] = f + + # If the class attribute (which is the default value for this + # field) exists and is of type 'Field', replace it with the + # real default. This is so that normal class introspection + # sees a real default value, not a Field. + if isinstance(getattr(cls, f.name, None), Field): + if f.default is MISSING: + # If there's no default, delete the class attribute. + # This happens if we specify field(repr=False), for + # example (that is, we specified a field object, but + # no default value). Also if we're using a default + # factory. The class attribute should not be set at + # all in the post-processed class. + delattr(cls, f.name) + else: + setattr(cls, f.name, f.default) + + # Do we have any Field members that don't also have annotations? + for name, value in cls.__dict__.items(): + if isinstance(value, Field) and not name in cls_annotations: + raise TypeError(f'{name!r} is a field but has no type annotation') + + # Check rules that apply if we are derived from any dataclasses. + if has_dataclass_bases: + # Raise an exception if any of our bases are frozen, but we're not. + if any_frozen_base and not frozen: + raise TypeError('cannot inherit non-frozen dataclass from a ' + 'frozen one') + + # Raise an exception if we're frozen, but none of our bases are. + if not any_frozen_base and frozen: + raise TypeError('cannot inherit frozen dataclass from a ' + 'non-frozen one') + + # Remember all of the fields on our class (including bases). 
This + # also marks this class as being a dataclass. + setattr(cls, _FIELDS, fields) + + # Was this class defined with an explicit __hash__? Note that if + # __eq__ is defined in this class, then python will automatically + # set __hash__ to None. This is a heuristic, as it's possible + # that such a __hash__ == None was not auto-generated, but it + # close enough. + class_hash = cls.__dict__.get('__hash__', MISSING) + has_explicit_hash = not (class_hash is MISSING or + (class_hash is None and '__eq__' in cls.__dict__)) + + # If we're generating ordering methods, we must be generating the + # eq methods. + if order and not eq: + raise ValueError('eq must be true if order is true') + + if init: + # Does this class have a post-init function? + has_post_init = hasattr(cls, _POST_INIT_NAME) + + # Include InitVars and regular fields (so, not ClassVars). + flds = [f for f in fields.values() + if f._field_type in (_FIELD, _FIELD_INITVAR)] + _set_new_attribute(cls, '__init__', + _init_fn(flds, + frozen, + has_post_init, + # The name to use for the "self" + # param in __init__. Use "self" + # if possible. + '__dataclass_self__' if 'self' in fields + else 'self', + )) + + # Get the fields as a list, and include only real fields. This is + # used in all of the following methods. + field_list = [f for f in fields.values() if f._field_type is _FIELD] + + if repr: + flds = [f for f in field_list if f.repr] + _set_new_attribute(cls, '__repr__', _repr_fn(flds)) + + if eq: + # Create _eq__ method. There's no need for a __ne__ method, + # since python will call __eq__ and negate it. + flds = [f for f in field_list if f.compare] + self_tuple = _tuple_str('self', flds) + other_tuple = _tuple_str('other', flds) + _set_new_attribute(cls, '__eq__', + _cmp_fn('__eq__', '==', + self_tuple, other_tuple)) + + if order: + # Create and set the ordering methods. + flds = [f for f in field_list if f.compare] + self_tuple = _tuple_str('self', flds) + other_tuple = _tuple_str('other', flds) + for name, op in [('__lt__', '<'), + ('__le__', '<='), + ('__gt__', '>'), + ('__ge__', '>='), + ]: + if _set_new_attribute(cls, name, + _cmp_fn(name, op, self_tuple, other_tuple)): + raise TypeError(f'Cannot overwrite attribute {name} ' + f'in class {cls.__name__}. Consider using ' + 'functools.total_ordering') + + if frozen: + for fn in _frozen_get_del_attr(cls, field_list): + if _set_new_attribute(cls, fn.__name__, fn): + raise TypeError(f'Cannot overwrite attribute {fn.__name__} ' + f'in class {cls.__name__}') + + # Decide if/how we're going to create a hash function. + hash_action = _hash_action[bool(unsafe_hash), + bool(eq), + bool(frozen), + has_explicit_hash] + if hash_action: + # No need to call _set_new_attribute here, since by the time + # we're here the overwriting is unconditional. + cls.__hash__ = hash_action(cls, field_list) + + if not getattr(cls, '__doc__'): + # Create a class doc-string. + cls.__doc__ = (cls.__name__ + + str(inspect.signature(cls)).replace(' -> None', '')) + + return cls + + +# _cls should never be specified by keyword, so start it with an +# underscore. The presence of _cls is used to detect if this +# decorator is being called with parameters or not. +def dataclass(_cls=None, *, init=True, repr=True, eq=True, order=False, + unsafe_hash=False, frozen=False): + """Returns the same class as was passed in, with dunder methods + added based on the fields defined in the class. + + Examines PEP 526 __annotations__ to determine fields. + + If init is true, an __init__() method is added to the class. 
If + repr is true, a __repr__() method is added. If order is true, rich + comparison dunder methods are added. If unsafe_hash is true, a + __hash__() method function is added. If frozen is true, fields may + not be assigned to after instance creation. + """ + + def wrap(cls): + return _process_class(cls, init, repr, eq, order, unsafe_hash, frozen) + + # See if we're being called as @dataclass or @dataclass(). + if _cls is None: + # We're called with parens. + return wrap + + # We're called as @dataclass without parens. + return wrap(_cls) + + +def fields(class_or_instance): + """Return a tuple describing the fields of this dataclass. + + Accepts a dataclass or an instance of one. Tuple elements are of + type Field. + """ + + # Might it be worth caching this, per class? + try: + fields = getattr(class_or_instance, _FIELDS) + except AttributeError: + raise TypeError('must be called with a dataclass type or instance') + + # Exclude pseudo-fields. Note that fields is sorted by insertion + # order, so the order of the tuple is as the fields were defined. + return tuple(f for f in fields.values() if f._field_type is _FIELD) + + +def _is_dataclass_instance(obj): + """Returns True if obj is an instance of a dataclass.""" + return not isinstance(obj, type) and hasattr(obj, _FIELDS) + + +def is_dataclass(obj): + """Returns True if obj is a dataclass or an instance of a + dataclass.""" + return hasattr(obj, _FIELDS) + + +def asdict(obj, *, dict_factory=dict): + """Return the fields of a dataclass instance as a new dictionary mapping + field names to field values. + + Example usage: + + @dataclass + class C: + x: int + y: int + + c = C(1, 2) + assert asdict(c) == {'x': 1, 'y': 2} + + If given, 'dict_factory' will be used instead of built-in dict. + The function applies recursively to field values that are + dataclass instances. This will also look into built-in containers: + tuples, lists, and dicts. + """ + if not _is_dataclass_instance(obj): + raise TypeError("asdict() should be called on dataclass instances") + return _asdict_inner(obj, dict_factory) + + +def _asdict_inner(obj, dict_factory): + if _is_dataclass_instance(obj): + result = [] + for f in fields(obj): + value = _asdict_inner(getattr(obj, f.name), dict_factory) + result.append((f.name, value)) + return dict_factory(result) + elif isinstance(obj, (list, tuple)): + return type(obj)(_asdict_inner(v, dict_factory) for v in obj) + elif isinstance(obj, dict): + return type(obj)((_asdict_inner(k, dict_factory), _asdict_inner(v, dict_factory)) + for k, v in obj.items()) + else: + return copy.deepcopy(obj) + + +def astuple(obj, *, tuple_factory=tuple): + """Return the fields of a dataclass instance as a new tuple of field values. + + Example usage:: + + @dataclass + class C: + x: int + y: int + + c = C(1, 2) + assert astuple(c) == (1, 2) + + If given, 'tuple_factory' will be used instead of built-in tuple. + The function applies recursively to field values that are + dataclass instances. This will also look into built-in containers: + tuples, lists, and dicts. 
+ """ + + if not _is_dataclass_instance(obj): + raise TypeError("astuple() should be called on dataclass instances") + return _astuple_inner(obj, tuple_factory) + + +def _astuple_inner(obj, tuple_factory): + if _is_dataclass_instance(obj): + result = [] + for f in fields(obj): + value = _astuple_inner(getattr(obj, f.name), tuple_factory) + result.append(value) + return tuple_factory(result) + elif isinstance(obj, (list, tuple)): + return type(obj)(_astuple_inner(v, tuple_factory) for v in obj) + elif isinstance(obj, dict): + return type(obj)((_astuple_inner(k, tuple_factory), _astuple_inner(v, tuple_factory)) + for k, v in obj.items()) + else: + return copy.deepcopy(obj) + + +def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True, + repr=True, eq=True, order=False, unsafe_hash=False, + frozen=False): + """Return a new dynamically created dataclass. + + The dataclass name will be 'cls_name'. 'fields' is an iterable + of either (name), (name, type) or (name, type, Field) objects. If type is + omitted, use the string 'typing.Any'. Field objects are created by + the equivalent of calling 'field(name, type [, Field-info])'. + + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + + is equivalent to: + + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) + + For the bases and namespace parameters, see the builtin type() function. + + The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to + dataclass(). + """ + + if namespace is None: + namespace = {} + else: + # Copy namespace since we're going to mutate it. + namespace = namespace.copy() + + # While we're looking through the field names, validate that they + # are identifiers, are not keywords, and not duplicates. + seen = set() + anns = {} + for item in fields: + if isinstance(item, str): + name = item + tp = 'typing.Any' + elif len(item) == 2: + name, tp, = item + elif len(item) == 3: + name, tp, spec = item + namespace[name] = spec + else: + raise TypeError(f'Invalid field: {item!r}') + + if not isinstance(name, str) or not name.isidentifier(): + raise TypeError(f'Field names must be valid identifers: {name!r}') + if keyword.iskeyword(name): + raise TypeError(f'Field names must not be keywords: {name!r}') + if name in seen: + raise TypeError(f'Field name duplicated: {name!r}') + + seen.add(name) + anns[name] = tp + + namespace['__annotations__'] = anns + # We use `types.new_class()` instead of simply `type()` to allow dynamic creation + # of generic dataclassses. + cls = types.new_class(cls_name, bases, {}, lambda ns: ns.update(namespace)) + return dataclass(cls, init=init, repr=repr, eq=eq, order=order, + unsafe_hash=unsafe_hash, frozen=frozen) + + +def replace(obj, **changes): + """Return a new object replacing specified fields with new values. + + This is especially useful for frozen classes. Example usage: + + @dataclass(frozen=True) + class C: + x: int + y: int + + c = C(1, 2) + c1 = replace(c, x=3) + assert c1.x == 3 and c1.y == 2 + """ + + # We're going to mutate 'changes', but that's okay because it's a + # new dict, even if called with 'replace(obj, **my_changes)'. + + if not _is_dataclass_instance(obj): + raise TypeError("replace() should be called on dataclass instances") + + # It's an error to have init=False fields in 'changes'. + # If a field is not in 'changes', read its value from the provided obj. + + for f in getattr(obj, _FIELDS).values(): + if not f.init: + # Error if this field is specified in changes. 
+ if f.name in changes: + raise ValueError(f'field {f.name} is declared with ' + 'init=False, it cannot be specified with ' + 'replace()') + continue + + if f.name not in changes: + changes[f.name] = getattr(obj, f.name) + + # Create the new object, which calls __init__() and + # __post_init__() (if defined), using all of the init fields we've + # added and/or left in 'changes'. If there are values supplied in + # changes that aren't fields, this will correctly raise a + # TypeError. + return obj.__class__(**changes) diff --git a/py3/lib/python3.6/site-packages/dateutil/__init__.py b/py3/lib/python3.6/site-packages/dateutil/__init__.py new file mode 100644 index 0000000..0defb82 --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/__init__.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +try: + from ._version import version as __version__ +except ImportError: + __version__ = 'unknown' + +__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz', + 'utils', 'zoneinfo'] diff --git a/py3/lib/python3.6/site-packages/dateutil/_common.py b/py3/lib/python3.6/site-packages/dateutil/_common.py new file mode 100644 index 0000000..4eb2659 --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/_common.py @@ -0,0 +1,43 @@ +""" +Common code used in multiple modules. +""" + + +class weekday(object): + __slots__ = ["weekday", "n"] + + def __init__(self, weekday, n=None): + self.weekday = weekday + self.n = n + + def __call__(self, n): + if n == self.n: + return self + else: + return self.__class__(self.weekday, n) + + def __eq__(self, other): + try: + if self.weekday != other.weekday or self.n != other.n: + return False + except AttributeError: + return False + return True + + def __hash__(self): + return hash(( + self.weekday, + self.n, + )) + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday] + if not self.n: + return s + else: + return "%s(%+d)" % (s, self.n) + +# vim:ts=4:sw=4:et diff --git a/py3/lib/python3.6/site-packages/dateutil/_version.py b/py3/lib/python3.6/site-packages/dateutil/_version.py new file mode 100644 index 0000000..670d7ab --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/_version.py @@ -0,0 +1,4 @@ +# coding: utf-8 +# file generated by setuptools_scm +# don't change, don't track in version control +version = '2.8.0' diff --git a/py3/lib/python3.6/site-packages/dateutil/easter.py b/py3/lib/python3.6/site-packages/dateutil/easter.py new file mode 100644 index 0000000..53b7c78 --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/easter.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic easter computing method for any given year, using +Western, Orthodox or Julian algorithms. +""" + +import datetime + +__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] + +EASTER_JULIAN = 1 +EASTER_ORTHODOX = 2 +EASTER_WESTERN = 3 + + +def easter(year, method=EASTER_WESTERN): + """ + This method was ported from the work done by GM Arts, + on top of the algorithm by Claus Tondering, which was + based in part on the algorithm of Ouding (1940), as + quoted in "Explanatory Supplement to the Astronomical + Almanac", P. Kenneth Seidelmann, editor. 
+ + This algorithm implements three different easter + calculation methods: + + 1 - Original calculation in Julian calendar, valid in + dates after 326 AD + 2 - Original method, with date converted to Gregorian + calendar, valid in years 1583 to 4099 + 3 - Revised method, in Gregorian calendar, valid in + years 1583 to 4099 as well + + These methods are represented by the constants: + + * ``EASTER_JULIAN = 1`` + * ``EASTER_ORTHODOX = 2`` + * ``EASTER_WESTERN = 3`` + + The default method is method 3. + + More about the algorithm may be found at: + + `GM Arts: Easter Algorithms `_ + + and + + `The Calendar FAQ: Easter `_ + + """ + + if not (1 <= method <= 3): + raise ValueError("invalid method") + + # g - Golden year - 1 + # c - Century + # h - (23 - Epact) mod 30 + # i - Number of days from March 21 to Paschal Full Moon + # j - Weekday for PFM (0=Sunday, etc) + # p - Number of days from March 21 to Sunday on or before PFM + # (-6 to 28 methods 1 & 3, to 56 for method 2) + # e - Extra days to add for method 2 (converting Julian + # date to Gregorian date) + + y = year + g = y % 19 + e = 0 + if method < 3: + # Old method + i = (19*g + 15) % 30 + j = (y + y//4 + i) % 7 + if method == 2: + # Extra dates to convert Julian to Gregorian date + e = 10 + if y > 1600: + e = e + y//100 - 16 - (y//100 - 16)//4 + else: + # New method + c = y//100 + h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 + i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) + j = (y + y//4 + i + 2 - c + c//4) % 7 + + # p can be from -6 to 56 corresponding to dates 22 March to 23 May + # (later dates apply to method 2, although 23 May never actually occurs) + p = i - j + e + d = 1 + (p + 27 + (p + 6)//40) % 31 + m = 3 + (p + 26)//30 + return datetime.date(int(y), int(m), int(d)) diff --git a/py3/lib/python3.6/site-packages/dateutil/parser/__init__.py b/py3/lib/python3.6/site-packages/dateutil/parser/__init__.py new file mode 100644 index 0000000..216762c --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/parser/__init__.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +from ._parser import parse, parser, parserinfo +from ._parser import DEFAULTPARSER, DEFAULTTZPARSER +from ._parser import UnknownTimezoneWarning + +from ._parser import __doc__ + +from .isoparser import isoparser, isoparse + +__all__ = ['parse', 'parser', 'parserinfo', + 'isoparse', 'isoparser', + 'UnknownTimezoneWarning'] + + +### +# Deprecate portions of the private interface so that downstream code that +# is improperly relying on it is given *some* notice. 
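+#
+# For example, downstream code that still reaches into the private lexer keeps
+# working but now emits a DeprecationWarning (a sketch of the intended effect,
+# using the wrapped _timelex assigned at the bottom of this file):
+#
+#   from dateutil.parser import _timelex
+#   tokens = _timelex.split("2003-09-25")   # DeprecationWarning: _timelex is a private class ...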
+ + +def __deprecated_private_func(f): + from functools import wraps + import warnings + + msg = ('{name} is a private function and may break without warning, ' + 'it will be moved and or renamed in future versions.') + msg = msg.format(name=f.__name__) + + @wraps(f) + def deprecated_func(*args, **kwargs): + warnings.warn(msg, DeprecationWarning) + return f(*args, **kwargs) + + return deprecated_func + +def __deprecate_private_class(c): + import warnings + + msg = ('{name} is a private class and may break without warning, ' + 'it will be moved and or renamed in future versions.') + msg = msg.format(name=c.__name__) + + class private_class(c): + __doc__ = c.__doc__ + + def __init__(self, *args, **kwargs): + warnings.warn(msg, DeprecationWarning) + super(private_class, self).__init__(*args, **kwargs) + + private_class.__name__ = c.__name__ + + return private_class + + +from ._parser import _timelex, _resultbase +from ._parser import _tzparser, _parsetz + +_timelex = __deprecate_private_class(_timelex) +_tzparser = __deprecate_private_class(_tzparser) +_resultbase = __deprecate_private_class(_resultbase) +_parsetz = __deprecated_private_func(_parsetz) diff --git a/py3/lib/python3.6/site-packages/dateutil/parser/_parser.py b/py3/lib/python3.6/site-packages/dateutil/parser/_parser.py new file mode 100644 index 0000000..0da0f3e --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/parser/_parser.py @@ -0,0 +1,1580 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic date/time string parser which is able to parse +most known formats to represent a date and/or time. + +This module attempts to be forgiving with regards to unlikely input formats, +returning a datetime object even for dates which are ambiguous. If an element +of a date/time stamp is omitted, the following rules are applied: + +- If AM or PM is left unspecified, a 24-hour clock is assumed, however, an hour + on a 12-hour clock (``0 <= hour <= 12``) *must* be specified if AM or PM is + specified. +- If a time zone is omitted, a timezone-naive datetime is returned. + +If any other elements are missing, they are taken from the +:class:`datetime.datetime` object passed to the parameter ``default``. If this +results in a day number exceeding the valid number of days per month, the +value falls back to the end of the month. + +Additional resources about date/time string formats can be found below: + +- `A summary of the international standard date and time notation + `_ +- `W3C Date and Time Formats `_ +- `Time Formats (Planetary Rings Node) `_ +- `CPAN ParseDate module + `_ +- `Java SimpleDateFormat Class + `_ +""" +from __future__ import unicode_literals + +import datetime +import re +import string +import time +import warnings + +from calendar import monthrange +from io import StringIO + +import six +from six import integer_types, text_type + +from decimal import Decimal + +from warnings import warn + +from .. import relativedelta +from .. import tz + +__all__ = ["parse", "parserinfo"] + + +# TODO: pandas.core.tools.datetimes imports this explicitly. Might be worth +# making public and/or figuring out if there is something we can +# take off their plate. 
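+# For example, a sketch of the defaulting rule described in the module
+# docstring above: elements missing from the string are taken from the
+# ``default`` datetime, so only the time of day comes from the input here.
+#
+#   >>> from dateutil.parser import parse
+#   >>> from datetime import datetime
+#   >>> parse("10:36", default=datetime(2003, 9, 25))
+#   datetime.datetime(2003, 9, 25, 10, 36)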
+class _timelex(object): + # Fractional seconds are sometimes split by a comma + _split_decimal = re.compile("([.,])") + + def __init__(self, instream): + if six.PY2: + # In Python 2, we can't duck type properly because unicode has + # a 'decode' function, and we'd be double-decoding + if isinstance(instream, (bytes, bytearray)): + instream = instream.decode() + else: + if getattr(instream, 'decode', None) is not None: + instream = instream.decode() + + if isinstance(instream, text_type): + instream = StringIO(instream) + elif getattr(instream, 'read', None) is None: + raise TypeError('Parser must be a string or character stream, not ' + '{itype}'.format(itype=instream.__class__.__name__)) + + self.instream = instream + self.charstack = [] + self.tokenstack = [] + self.eof = False + + def get_token(self): + """ + This function breaks the time string into lexical units (tokens), which + can be parsed by the parser. Lexical units are demarcated by changes in + the character set, so any continuous string of letters is considered + one unit, any continuous string of numbers is considered one unit. + + The main complication arises from the fact that dots ('.') can be used + both as separators (e.g. "Sep.20.2009") or decimal points (e.g. + "4:30:21.447"). As such, it is necessary to read the full context of + any dot-separated strings before breaking it into tokens; as such, this + function maintains a "token stack", for when the ambiguous context + demands that multiple tokens be parsed at once. + """ + if self.tokenstack: + return self.tokenstack.pop(0) + + seenletters = False + token = None + state = None + + while not self.eof: + # We only realize that we've reached the end of a token when we + # find a character that's not part of the current token - since + # that character may be part of the next token, it's stored in the + # charstack. + if self.charstack: + nextchar = self.charstack.pop(0) + else: + nextchar = self.instream.read(1) + while nextchar == '\x00': + nextchar = self.instream.read(1) + + if not nextchar: + self.eof = True + break + elif not state: + # First character of the token - determines if we're starting + # to parse a word, a number or something else. + token = nextchar + if self.isword(nextchar): + state = 'a' + elif self.isnum(nextchar): + state = '0' + elif self.isspace(nextchar): + token = ' ' + break # emit token + else: + break # emit token + elif state == 'a': + # If we've already started reading a word, we keep reading + # letters until we find something that's not part of a word. + seenletters = True + if self.isword(nextchar): + token += nextchar + elif nextchar == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0': + # If we've already started reading a number, we keep reading + # numbers until we find something that doesn't fit. + if self.isnum(nextchar): + token += nextchar + elif nextchar == '.' or (nextchar == ',' and len(token) >= 2): + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == 'a.': + # If we've seen some letters and a dot separator, continue + # parsing, and the tokens will be broken up later. + seenletters = True + if nextchar == '.' or self.isword(nextchar): + token += nextchar + elif self.isnum(nextchar) and token[-1] == '.': + token += nextchar + state = '0.' 
+ else: + self.charstack.append(nextchar) + break # emit token + elif state == '0.': + # If we've seen at least one dot separator, keep going, we'll + # break up the tokens later. + if nextchar == '.' or self.isnum(nextchar): + token += nextchar + elif self.isword(nextchar) and token[-1] == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + + if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or + token[-1] in '.,')): + l = self._split_decimal.split(token) + token = l[0] + for tok in l[1:]: + if tok: + self.tokenstack.append(tok) + + if state == '0.' and token.count('.') == 0: + token = token.replace(',', '.') + + return token + + def __iter__(self): + return self + + def __next__(self): + token = self.get_token() + if token is None: + raise StopIteration + + return token + + def next(self): + return self.__next__() # Python 2.x support + + @classmethod + def split(cls, s): + return list(cls(s)) + + @classmethod + def isword(cls, nextchar): + """ Whether or not the next character is part of a word """ + return nextchar.isalpha() + + @classmethod + def isnum(cls, nextchar): + """ Whether the next character is part of a number """ + return nextchar.isdigit() + + @classmethod + def isspace(cls, nextchar): + """ Whether the next character is whitespace """ + return nextchar.isspace() + + +class _resultbase(object): + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def _repr(self, classname): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (classname, ", ".join(l)) + + def __len__(self): + return (sum(getattr(self, attr) is not None + for attr in self.__slots__)) + + def __repr__(self): + return self._repr(self.__class__.__name__) + + +class parserinfo(object): + """ + Class which handles what inputs are accepted. Subclass this to customize + the language and acceptable values for each parameter. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. Default is ``False``. + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + Default is ``False``. 
+ """ + + # m from a.m/p.m, t from ISO T separator + JUMP = [" ", ".", ",", ";", "-", "/", "'", + "at", "on", "and", "ad", "m", "t", "of", + "st", "nd", "rd", "th"] + + WEEKDAYS = [("Mon", "Monday"), + ("Tue", "Tuesday"), # TODO: "Tues" + ("Wed", "Wednesday"), + ("Thu", "Thursday"), # TODO: "Thurs" + ("Fri", "Friday"), + ("Sat", "Saturday"), + ("Sun", "Sunday")] + MONTHS = [("Jan", "January"), + ("Feb", "February"), # TODO: "Febr" + ("Mar", "March"), + ("Apr", "April"), + ("May", "May"), + ("Jun", "June"), + ("Jul", "July"), + ("Aug", "August"), + ("Sep", "Sept", "September"), + ("Oct", "October"), + ("Nov", "November"), + ("Dec", "December")] + HMS = [("h", "hour", "hours"), + ("m", "minute", "minutes"), + ("s", "second", "seconds")] + AMPM = [("am", "a"), + ("pm", "p")] + UTCZONE = ["UTC", "GMT", "Z", "z"] + PERTAIN = ["of"] + TZOFFSET = {} + # TODO: ERA = ["AD", "BC", "CE", "BCE", "Stardate", + # "Anno Domini", "Year of Our Lord"] + + def __init__(self, dayfirst=False, yearfirst=False): + self._jump = self._convert(self.JUMP) + self._weekdays = self._convert(self.WEEKDAYS) + self._months = self._convert(self.MONTHS) + self._hms = self._convert(self.HMS) + self._ampm = self._convert(self.AMPM) + self._utczone = self._convert(self.UTCZONE) + self._pertain = self._convert(self.PERTAIN) + + self.dayfirst = dayfirst + self.yearfirst = yearfirst + + self._year = time.localtime().tm_year + self._century = self._year // 100 * 100 + + def _convert(self, lst): + dct = {} + for i, v in enumerate(lst): + if isinstance(v, tuple): + for v in v: + dct[v.lower()] = i + else: + dct[v.lower()] = i + return dct + + def jump(self, name): + return name.lower() in self._jump + + def weekday(self, name): + try: + return self._weekdays[name.lower()] + except KeyError: + pass + return None + + def month(self, name): + try: + return self._months[name.lower()] + 1 + except KeyError: + pass + return None + + def hms(self, name): + try: + return self._hms[name.lower()] + except KeyError: + return None + + def ampm(self, name): + try: + return self._ampm[name.lower()] + except KeyError: + return None + + def pertain(self, name): + return name.lower() in self._pertain + + def utczone(self, name): + return name.lower() in self._utczone + + def tzoffset(self, name): + if name in self._utczone: + return 0 + + return self.TZOFFSET.get(name) + + def convertyear(self, year, century_specified=False): + """ + Converts two-digit years to year within [-50, 49] + range of self._year (current local time) + """ + + # Function contract is that the year is always positive + assert year >= 0 + + if year < 100 and not century_specified: + # assume current century to start + year += self._century + + if year >= self._year + 50: # if too far in future + year -= 100 + elif year < self._year - 50: # if too far in past + year += 100 + + return year + + def validate(self, res): + # move to info + if res.year is not None: + res.year = self.convertyear(res.year, res.century_specified) + + if ((res.tzoffset == 0 and not res.tzname) or + (res.tzname == 'Z' or res.tzname == 'z')): + res.tzname = "UTC" + res.tzoffset = 0 + elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): + res.tzoffset = 0 + return True + + +class _ymd(list): + def __init__(self, *args, **kwargs): + super(self.__class__, self).__init__(*args, **kwargs) + self.century_specified = False + self.dstridx = None + self.mstridx = None + self.ystridx = None + + @property + def has_year(self): + return self.ystridx is not None + + @property + def has_month(self): + 
return self.mstridx is not None + + @property + def has_day(self): + return self.dstridx is not None + + def could_be_day(self, value): + if self.has_day: + return False + elif not self.has_month: + return 1 <= value <= 31 + elif not self.has_year: + # Be permissive, assume leapyear + month = self[self.mstridx] + return 1 <= value <= monthrange(2000, month)[1] + else: + month = self[self.mstridx] + year = self[self.ystridx] + return 1 <= value <= monthrange(year, month)[1] + + def append(self, val, label=None): + if hasattr(val, '__len__'): + if val.isdigit() and len(val) > 2: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + elif val > 100: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + + super(self.__class__, self).append(int(val)) + + if label == 'M': + if self.has_month: + raise ValueError('Month is already set') + self.mstridx = len(self) - 1 + elif label == 'D': + if self.has_day: + raise ValueError('Day is already set') + self.dstridx = len(self) - 1 + elif label == 'Y': + if self.has_year: + raise ValueError('Year is already set') + self.ystridx = len(self) - 1 + + def _resolve_from_stridxs(self, strids): + """ + Try to resolve the identities of year/month/day elements using + ystridx, mstridx, and dstridx, if enough of these are specified. + """ + if len(self) == 3 and len(strids) == 2: + # we can back out the remaining stridx value + missing = [x for x in range(3) if x not in strids.values()] + key = [x for x in ['y', 'm', 'd'] if x not in strids] + assert len(missing) == len(key) == 1 + key = key[0] + val = missing[0] + strids[key] = val + + assert len(self) == len(strids) # otherwise this should not be called + out = {key: self[strids[key]] for key in strids} + return (out.get('y'), out.get('m'), out.get('d')) + + def resolve_ymd(self, yearfirst, dayfirst): + len_ymd = len(self) + year, month, day = (None, None, None) + + strids = (('y', self.ystridx), + ('m', self.mstridx), + ('d', self.dstridx)) + + strids = {key: val for key, val in strids if val is not None} + if (len(self) == len(strids) > 0 or + (len(self) == 3 and len(strids) == 2)): + return self._resolve_from_stridxs(strids) + + mstridx = self.mstridx + + if len_ymd > 3: + raise ValueError("More than three YMD values") + elif len_ymd == 1 or (mstridx is not None and len_ymd == 2): + # One member, or two members with a month string + if mstridx is not None: + month = self[mstridx] + # since mstridx is 0 or 1, self[mstridx-1] always + # looks up the other element + other = self[mstridx - 1] + else: + other = self[0] + + if len_ymd > 1 or mstridx is None: + if other > 31: + year = other + else: + day = other + + elif len_ymd == 2: + # Two members with numbers + if self[0] > 31: + # 99-01 + year, month = self + elif self[1] > 31: + # 01-99 + month, year = self + elif dayfirst and self[1] <= 12: + # 13-01 + day, month = self + else: + # 01-13 + month, day = self + + elif len_ymd == 3: + # Three members + if mstridx == 0: + if self[1] > 31: + # Apr-2003-25 + month, year, day = self + else: + month, day, year = self + elif mstridx == 1: + if self[0] > 31 or (yearfirst and self[2] <= 31): + # 99-Jan-01 + year, month, day = self + else: + # 01-Jan-01 + # Give precendence to day-first, since + # two-digit years is usually hand-written. + day, month, year = self + + elif mstridx == 2: + # WTF!? 
+ if self[1] > 31: + # 01-99-Jan + day, year, month = self + else: + # 99-01-Jan + year, day, month = self + + else: + if (self[0] > 31 or + self.ystridx == 0 or + (yearfirst and self[1] <= 12 and self[2] <= 31)): + # 99-01-01 + if dayfirst and self[2] <= 12: + year, day, month = self + else: + year, month, day = self + elif self[0] > 12 or (dayfirst and self[1] <= 12): + # 13-01-01 + day, month, year = self + else: + # 01-13-01 + month, day, year = self + + return year, month, day + + +class parser(object): + def __init__(self, info=None): + self.info = info or parserinfo() + + def parse(self, timestr, default=None, + ignoretz=False, tzinfos=None, **kwargs): + """ + Parse the date/time string into a :class:`datetime.datetime` object. + + :param timestr: + Any date/time string using the supported formats. + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a + naive :class:`datetime.datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param \\*\\*kwargs: + Keyword arguments as passed to ``_parse()``. + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ValueError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises TypeError: + Raised for non-string or character stream input. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
+ """ + + if default is None: + default = datetime.datetime.now().replace(hour=0, minute=0, + second=0, microsecond=0) + + res, skipped_tokens = self._parse(timestr, **kwargs) + + if res is None: + raise ValueError("Unknown string format:", timestr) + + if len(res) == 0: + raise ValueError("String does not contain a date:", timestr) + + ret = self._build_naive(res, default) + + if not ignoretz: + ret = self._build_tzaware(ret, res, tzinfos) + + if kwargs.get('fuzzy_with_tokens', False): + return ret, skipped_tokens + else: + return ret + + class _result(_resultbase): + __slots__ = ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond", + "tzname", "tzoffset", "ampm","any_unused_tokens"] + + def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, + fuzzy_with_tokens=False): + """ + Private method which performs the heavy lifting of parsing, called from + ``parse()``, which passes on its ``kwargs`` to this function. + + :param timestr: + The string to parse. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. If set to ``None``, this value is retrieved from the + current :class:`parserinfo` object (which itself defaults to + ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + If this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. 
doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + """ + if fuzzy_with_tokens: + fuzzy = True + + info = self.info + + if dayfirst is None: + dayfirst = info.dayfirst + + if yearfirst is None: + yearfirst = info.yearfirst + + res = self._result() + l = _timelex.split(timestr) # Splits the timestr into tokens + + skipped_idxs = [] + + # year/month/day list + ymd = _ymd() + + len_l = len(l) + i = 0 + try: + while i < len_l: + + # Check if it's a number + value_repr = l[i] + try: + value = float(value_repr) + except ValueError: + value = None + + if value is not None: + # Numeric token + i = self._parse_numeric_token(l, i, info, ymd, res, fuzzy) + + # Check weekday + elif info.weekday(l[i]) is not None: + value = info.weekday(l[i]) + res.weekday = value + + # Check month name + elif info.month(l[i]) is not None: + value = info.month(l[i]) + ymd.append(value, 'M') + + if i + 1 < len_l: + if l[i + 1] in ('-', '/'): + # Jan-01[-99] + sep = l[i + 1] + ymd.append(l[i + 2]) + + if i + 3 < len_l and l[i + 3] == sep: + # Jan-01-99 + ymd.append(l[i + 4]) + i += 2 + + i += 2 + + elif (i + 4 < len_l and l[i + 1] == l[i + 3] == ' ' and + info.pertain(l[i + 2])): + # Jan of 01 + # In this case, 01 is clearly year + if l[i + 4].isdigit(): + # Convert it here to become unambiguous + value = int(l[i + 4]) + year = str(info.convertyear(value)) + ymd.append(year, 'Y') + else: + # Wrong guess + pass + # TODO: not hit in tests + i += 4 + + # Check am/pm + elif info.ampm(l[i]) is not None: + value = info.ampm(l[i]) + val_is_ampm = self._ampm_valid(res.hour, res.ampm, fuzzy) + + if val_is_ampm: + res.hour = self._adjust_ampm(res.hour, value) + res.ampm = value + + elif fuzzy: + skipped_idxs.append(i) + + # Check for a timezone name + elif self._could_be_tzname(res.hour, res.tzname, res.tzoffset, l[i]): + res.tzname = l[i] + res.tzoffset = info.tzoffset(res.tzname) + + # Check for something like GMT+3, or BRST+3. Notice + # that it doesn't mean "I am 3 hours after GMT", but + # "my time +3 is GMT". If found, we reverse the + # logic so that timezone parsing code will get it + # right. + if i + 1 < len_l and l[i + 1] in ('+', '-'): + l[i + 1] = ('+', '-')[l[i + 1] == '+'] + res.tzoffset = None + if info.utczone(res.tzname): + # With something like GMT+3, the timezone + # is *not* GMT. + res.tzname = None + + # Check for a numbered timezone + elif res.hour is not None and l[i] in ('+', '-'): + signal = (-1, 1)[l[i] == '+'] + len_li = len(l[i + 1]) + + # TODO: check that l[i + 1] is integer? + if len_li == 4: + # -0300 + hour_offset = int(l[i + 1][:2]) + min_offset = int(l[i + 1][2:]) + elif i + 2 < len_l and l[i + 2] == ':': + # -03:00 + hour_offset = int(l[i + 1]) + min_offset = int(l[i + 3]) # TODO: Check that l[i+3] is minute-like? 
+ i += 2 + elif len_li <= 2: + # -[0]3 + hour_offset = int(l[i + 1][:2]) + min_offset = 0 + else: + raise ValueError(timestr) + + res.tzoffset = signal * (hour_offset * 3600 + min_offset * 60) + + # Look for a timezone name between parenthesis + if (i + 5 < len_l and + info.jump(l[i + 2]) and l[i + 3] == '(' and + l[i + 5] == ')' and + 3 <= len(l[i + 4]) and + self._could_be_tzname(res.hour, res.tzname, + None, l[i + 4])): + # -0300 (BRST) + res.tzname = l[i + 4] + i += 4 + + i += 1 + + # Check jumps + elif not (info.jump(l[i]) or fuzzy): + raise ValueError(timestr) + + else: + skipped_idxs.append(i) + i += 1 + + # Process year/month/day + year, month, day = ymd.resolve_ymd(yearfirst, dayfirst) + + res.century_specified = ymd.century_specified + res.year = year + res.month = month + res.day = day + + except (IndexError, ValueError): + return None, None + + if not info.validate(res): + return None, None + + if fuzzy_with_tokens: + skipped_tokens = self._recombine_skipped(l, skipped_idxs) + return res, tuple(skipped_tokens) + else: + return res, None + + def _parse_numeric_token(self, tokens, idx, info, ymd, res, fuzzy): + # Token is a number + value_repr = tokens[idx] + try: + value = self._to_decimal(value_repr) + except Exception as e: + six.raise_from(ValueError('Unknown numeric token'), e) + + len_li = len(value_repr) + + len_l = len(tokens) + + if (len(ymd) == 3 and len_li in (2, 4) and + res.hour is None and + (idx + 1 >= len_l or + (tokens[idx + 1] != ':' and + info.hms(tokens[idx + 1]) is None))): + # 19990101T23[59] + s = tokens[idx] + res.hour = int(s[:2]) + + if len_li == 4: + res.minute = int(s[2:]) + + elif len_li == 6 or (len_li > 6 and tokens[idx].find('.') == 6): + # YYMMDD or HHMMSS[.ss] + s = tokens[idx] + + if not ymd and '.' not in tokens[idx]: + ymd.append(s[:2]) + ymd.append(s[2:4]) + ymd.append(s[4:]) + else: + # 19990101T235959[.59] + + # TODO: Check if res attributes already set. + res.hour = int(s[:2]) + res.minute = int(s[2:4]) + res.second, res.microsecond = self._parsems(s[4:]) + + elif len_li in (8, 12, 14): + # YYYYMMDD + s = tokens[idx] + ymd.append(s[:4], 'Y') + ymd.append(s[4:6]) + ymd.append(s[6:8]) + + if len_li > 8: + res.hour = int(s[8:10]) + res.minute = int(s[10:12]) + + if len_li > 12: + res.second = int(s[12:]) + + elif self._find_hms_idx(idx, tokens, info, allow_jump=True) is not None: + # HH[ ]h or MM[ ]m or SS[.ss][ ]s + hms_idx = self._find_hms_idx(idx, tokens, info, allow_jump=True) + (idx, hms) = self._parse_hms(idx, tokens, info, hms_idx) + if hms is not None: + # TODO: checking that hour/minute/second are not + # already set? + self._assign_hms(res, value_repr, hms) + + elif idx + 2 < len_l and tokens[idx + 1] == ':': + # HH:MM[:SS[.ss]] + res.hour = int(value) + value = self._to_decimal(tokens[idx + 2]) # TODO: try/except for this? 
+ (res.minute, res.second) = self._parse_min_sec(value) + + if idx + 4 < len_l and tokens[idx + 3] == ':': + res.second, res.microsecond = self._parsems(tokens[idx + 4]) + + idx += 2 + + idx += 2 + + elif idx + 1 < len_l and tokens[idx + 1] in ('-', '/', '.'): + sep = tokens[idx + 1] + ymd.append(value_repr) + + if idx + 2 < len_l and not info.jump(tokens[idx + 2]): + if tokens[idx + 2].isdigit(): + # 01-01[-01] + ymd.append(tokens[idx + 2]) + else: + # 01-Jan[-01] + value = info.month(tokens[idx + 2]) + + if value is not None: + ymd.append(value, 'M') + else: + raise ValueError() + + if idx + 3 < len_l and tokens[idx + 3] == sep: + # We have three members + value = info.month(tokens[idx + 4]) + + if value is not None: + ymd.append(value, 'M') + else: + ymd.append(tokens[idx + 4]) + idx += 2 + + idx += 1 + idx += 1 + + elif idx + 1 >= len_l or info.jump(tokens[idx + 1]): + if idx + 2 < len_l and info.ampm(tokens[idx + 2]) is not None: + # 12 am + hour = int(value) + res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 2])) + idx += 1 + else: + # Year, month or day + ymd.append(value) + idx += 1 + + elif info.ampm(tokens[idx + 1]) is not None and (0 <= value < 24): + # 12am + hour = int(value) + res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 1])) + idx += 1 + + elif ymd.could_be_day(value): + ymd.append(value) + + elif not fuzzy: + raise ValueError() + + return idx + + def _find_hms_idx(self, idx, tokens, info, allow_jump): + len_l = len(tokens) + + if idx+1 < len_l and info.hms(tokens[idx+1]) is not None: + # There is an "h", "m", or "s" label following this token. We take + # assign the upcoming label to the current token. + # e.g. the "12" in 12h" + hms_idx = idx + 1 + + elif (allow_jump and idx+2 < len_l and tokens[idx+1] == ' ' and + info.hms(tokens[idx+2]) is not None): + # There is a space and then an "h", "m", or "s" label. + # e.g. the "12" in "12 h" + hms_idx = idx + 2 + + elif idx > 0 and info.hms(tokens[idx-1]) is not None: + # There is a "h", "m", or "s" preceeding this token. Since neither + # of the previous cases was hit, there is no label following this + # token, so we use the previous label. + # e.g. the "04" in "12h04" + hms_idx = idx-1 + + elif (1 < idx == len_l-1 and tokens[idx-1] == ' ' and + info.hms(tokens[idx-2]) is not None): + # If we are looking at the final token, we allow for a + # backward-looking check to skip over a space. + # TODO: Are we sure this is the right condition here? + hms_idx = idx - 2 + + else: + hms_idx = None + + return hms_idx + + def _assign_hms(self, res, value_repr, hms): + # See GH issue #427, fixing float rounding + value = self._to_decimal(value_repr) + + if hms == 0: + # Hour + res.hour = int(value) + if value % 1: + res.minute = int(60*(value % 1)) + + elif hms == 1: + (res.minute, res.second) = self._parse_min_sec(value) + + elif hms == 2: + (res.second, res.microsecond) = self._parsems(value_repr) + + def _could_be_tzname(self, hour, tzname, tzoffset, token): + return (hour is not None and + tzname is None and + tzoffset is None and + len(token) <= 5 and + (all(x in string.ascii_uppercase for x in token) + or token in self.info.UTCZONE)) + + def _ampm_valid(self, hour, ampm, fuzzy): + """ + For fuzzy parsing, 'a' or 'am' (both valid English words) + may erroneously trigger the AM/PM flag. Deal with that + here. + """ + val_is_ampm = True + + # If there's already an AM/PM flag, this one isn't one. 
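+        # In fuzzy mode a duplicate AM/PM flag, a missing hour, or an hour
+        # outside 0-12 simply demotes this token to an ordinary skipped word;
+        # in strict (non-fuzzy) parsing the missing-hour and out-of-range
+        # cases raise ValueError instead.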
+ if fuzzy and ampm is not None: + val_is_ampm = False + + # If AM/PM is found and hour is not, raise a ValueError + if hour is None: + if fuzzy: + val_is_ampm = False + else: + raise ValueError('No hour specified with AM or PM flag.') + elif not 0 <= hour <= 12: + # If AM/PM is found, it's a 12 hour clock, so raise + # an error for invalid range + if fuzzy: + val_is_ampm = False + else: + raise ValueError('Invalid hour specified for 12-hour clock.') + + return val_is_ampm + + def _adjust_ampm(self, hour, ampm): + if hour < 12 and ampm == 1: + hour += 12 + elif hour == 12 and ampm == 0: + hour = 0 + return hour + + def _parse_min_sec(self, value): + # TODO: Every usage of this function sets res.second to the return + # value. Are there any cases where second will be returned as None and + # we *dont* want to set res.second = None? + minute = int(value) + second = None + + sec_remainder = value % 1 + if sec_remainder: + second = int(60 * sec_remainder) + return (minute, second) + + def _parsems(self, value): + """Parse a I[.F] seconds value into (seconds, microseconds).""" + if "." not in value: + return int(value), 0 + else: + i, f = value.split(".") + return int(i), int(f.ljust(6, "0")[:6]) + + def _parse_hms(self, idx, tokens, info, hms_idx): + # TODO: Is this going to admit a lot of false-positives for when we + # just happen to have digits and "h", "m" or "s" characters in non-date + # text? I guess hex hashes won't have that problem, but there's plenty + # of random junk out there. + if hms_idx is None: + hms = None + new_idx = idx + elif hms_idx > idx: + hms = info.hms(tokens[hms_idx]) + new_idx = hms_idx + else: + # Looking backwards, increment one. + hms = info.hms(tokens[hms_idx]) + 1 + new_idx = idx + + return (new_idx, hms) + + def _recombine_skipped(self, tokens, skipped_idxs): + """ + >>> tokens = ["foo", " ", "bar", " ", "19June2000", "baz"] + >>> skipped_idxs = [0, 1, 2, 5] + >>> _recombine_skipped(tokens, skipped_idxs) + ["foo bar", "baz"] + """ + skipped_tokens = [] + for i, idx in enumerate(sorted(skipped_idxs)): + if i > 0 and idx - 1 == skipped_idxs[i - 1]: + skipped_tokens[-1] = skipped_tokens[-1] + tokens[idx] + else: + skipped_tokens.append(tokens[idx]) + + return skipped_tokens + + def _build_tzinfo(self, tzinfos, tzname, tzoffset): + if callable(tzinfos): + tzdata = tzinfos(tzname, tzoffset) + else: + tzdata = tzinfos.get(tzname) + # handle case where tzinfo is paased an options that returns None + # eg tzinfos = {'BRST' : None} + if isinstance(tzdata, datetime.tzinfo) or tzdata is None: + tzinfo = tzdata + elif isinstance(tzdata, text_type): + tzinfo = tz.tzstr(tzdata) + elif isinstance(tzdata, integer_types): + tzinfo = tz.tzoffset(tzname, tzdata) + return tzinfo + + def _build_tzaware(self, naive, res, tzinfos): + if (callable(tzinfos) or (tzinfos and res.tzname in tzinfos)): + tzinfo = self._build_tzinfo(tzinfos, res.tzname, res.tzoffset) + aware = naive.replace(tzinfo=tzinfo) + aware = self._assign_tzname(aware, res.tzname) + + elif res.tzname and res.tzname in time.tzname: + aware = naive.replace(tzinfo=tz.tzlocal()) + + # Handle ambiguous local datetime + aware = self._assign_tzname(aware, res.tzname) + + # This is mostly relevant for winter GMT zones parsed in the UK + if (aware.tzname() != res.tzname and + res.tzname in self.info.UTCZONE): + aware = aware.replace(tzinfo=tz.tzutc()) + + elif res.tzoffset == 0: + aware = naive.replace(tzinfo=tz.tzutc()) + + elif res.tzoffset: + aware = naive.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) + + elif not 
res.tzname and not res.tzoffset: + # i.e. no timezone information was found. + aware = naive + + elif res.tzname: + # tz-like string was parsed but we don't know what to do + # with it + warnings.warn("tzname {tzname} identified but not understood. " + "Pass `tzinfos` argument in order to correctly " + "return a timezone-aware datetime. In a future " + "version, this will raise an " + "exception.".format(tzname=res.tzname), + category=UnknownTimezoneWarning) + aware = naive + + return aware + + def _build_naive(self, res, default): + repl = {} + for attr in ("year", "month", "day", "hour", + "minute", "second", "microsecond"): + value = getattr(res, attr) + if value is not None: + repl[attr] = value + + if 'day' not in repl: + # If the default day exceeds the last day of the month, fall back + # to the end of the month. + cyear = default.year if res.year is None else res.year + cmonth = default.month if res.month is None else res.month + cday = default.day if res.day is None else res.day + + if cday > monthrange(cyear, cmonth)[1]: + repl['day'] = monthrange(cyear, cmonth)[1] + + naive = default.replace(**repl) + + if res.weekday is not None and not res.day: + naive = naive + relativedelta.relativedelta(weekday=res.weekday) + + return naive + + def _assign_tzname(self, dt, tzname): + if dt.tzname() != tzname: + new_dt = tz.enfold(dt, fold=1) + if new_dt.tzname() == tzname: + return new_dt + + return dt + + def _to_decimal(self, val): + try: + decimal_value = Decimal(val) + # See GH 662, edge case, infinite value should not be converted via `_to_decimal` + if not decimal_value.is_finite(): + raise ValueError("Converted decimal value is infinite or NaN") + except Exception as e: + msg = "Could not convert %s to decimal" % val + six.raise_from(ValueError(msg), e) + else: + return decimal_value + + +DEFAULTPARSER = parser() + + +def parse(timestr, parserinfo=None, **kwargs): + """ + + Parse a string in one of the supported formats, using the + ``parserinfo`` parameters. + + :param timestr: + A string containing a date/time stamp. + + :param parserinfo: + A :class:`parserinfo` object containing parameters for the parser. + If ``None``, the default arguments to the :class:`parserinfo` + constructor are used. + + The ``**kwargs`` parameter takes the following keyword arguments: + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. 
doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM and + YMD. If set to ``None``, this value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken to + be the year, otherwise the last number is taken to be the year. If + this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ValueError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
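+
+    A short sketch of the ``default`` and ``ignoretz`` keywords described
+    above (added for illustration; not part of the upstream docstring, the
+    output shown is what dateutil is expected to produce):
+
+    .. doctest::
+
+        >>> from dateutil.parser import parse
+        >>> from datetime import datetime
+        >>> # Fields missing from the string are taken from ``default``
+        >>> parse("10:36", default=datetime(2003, 9, 25))
+        datetime.datetime(2003, 9, 25, 10, 36)
+        >>> # ``ignoretz`` drops the parsed offset and returns a naive datetime
+        >>> parse("2003-09-25T10:49:41.5-03:00", ignoretz=True)
+        datetime.datetime(2003, 9, 25, 10, 49, 41, 500000)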
+ """ + if parserinfo: + return parser(parserinfo).parse(timestr, **kwargs) + else: + return DEFAULTPARSER.parse(timestr, **kwargs) + + +class _tzparser(object): + + class _result(_resultbase): + + __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", + "start", "end"] + + class _attr(_resultbase): + __slots__ = ["month", "week", "weekday", + "yday", "jyday", "day", "time"] + + def __repr__(self): + return self._repr("") + + def __init__(self): + _resultbase.__init__(self) + self.start = self._attr() + self.end = self._attr() + + def parse(self, tzstr): + res = self._result() + l = [x for x in re.split(r'([,:.]|[a-zA-Z]+|[0-9]+)',tzstr) if x] + used_idxs = list() + try: + + len_l = len(l) + + i = 0 + while i < len_l: + # BRST+3[BRDT[+2]] + j = i + while j < len_l and not [x for x in l[j] + if x in "0123456789:,-+"]: + j += 1 + if j != i: + if not res.stdabbr: + offattr = "stdoffset" + res.stdabbr = "".join(l[i:j]) + else: + offattr = "dstoffset" + res.dstabbr = "".join(l[i:j]) + + for ii in range(j): + used_idxs.append(ii) + i = j + if (i < len_l and (l[i] in ('+', '-') or l[i][0] in + "0123456789")): + if l[i] in ('+', '-'): + # Yes, that's right. See the TZ variable + # documentation. + signal = (1, -1)[l[i] == '+'] + used_idxs.append(i) + i += 1 + else: + signal = -1 + len_li = len(l[i]) + if len_li == 4: + # -0300 + setattr(res, offattr, (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) * signal) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + setattr(res, offattr, + (int(l[i]) * 3600 + + int(l[i + 2]) * 60) * signal) + used_idxs.append(i) + i += 2 + elif len_li <= 2: + # -[0]3 + setattr(res, offattr, + int(l[i][:2]) * 3600 * signal) + else: + return None + used_idxs.append(i) + i += 1 + if res.dstabbr: + break + else: + break + + + if i < len_l: + for j in range(i, len_l): + if l[j] == ';': + l[j] = ',' + + assert l[i] == ',' + + i += 1 + + if i >= len_l: + pass + elif (8 <= l.count(',') <= 9 and + not [y for x in l[i:] if x != ',' + for y in x if y not in "0123456789+-"]): + # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] + for x in (res.start, res.end): + x.month = int(l[i]) + used_idxs.append(i) + i += 2 + if l[i] == '-': + value = int(l[i + 1]) * -1 + used_idxs.append(i) + i += 1 + else: + value = int(l[i]) + used_idxs.append(i) + i += 2 + if value: + x.week = value + x.weekday = (int(l[i]) - 1) % 7 + else: + x.day = int(l[i]) + used_idxs.append(i) + i += 2 + x.time = int(l[i]) + used_idxs.append(i) + i += 2 + if i < len_l: + if l[i] in ('-', '+'): + signal = (-1, 1)[l[i] == "+"] + used_idxs.append(i) + i += 1 + else: + signal = 1 + used_idxs.append(i) + res.dstoffset = (res.stdoffset + int(l[i]) * signal) + + # This was a made-up format that is not in normal use + warn(('Parsed time zone "%s"' % tzstr) + + 'is in a non-standard dateutil-specific format, which ' + + 'is now deprecated; support for parsing this format ' + + 'will be removed in future versions. 
It is recommended ' + + 'that you switch to a standard format like the GNU ' + + 'TZ variable format.', tz.DeprecatedTzFormatWarning) + elif (l.count(',') == 2 and l[i:].count('/') <= 2 and + not [y for x in l[i:] if x not in (',', '/', 'J', 'M', + '.', '-', ':') + for y in x if y not in "0123456789"]): + for x in (res.start, res.end): + if l[i] == 'J': + # non-leap year day (1 based) + used_idxs.append(i) + i += 1 + x.jyday = int(l[i]) + elif l[i] == 'M': + # month[-.]week[-.]weekday + used_idxs.append(i) + i += 1 + x.month = int(l[i]) + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.week = int(l[i]) + if x.week == 5: + x.week = -1 + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.weekday = (int(l[i]) - 1) % 7 + else: + # year day (zero based) + x.yday = int(l[i]) + 1 + + used_idxs.append(i) + i += 1 + + if i < len_l and l[i] == '/': + used_idxs.append(i) + i += 1 + # start time + len_li = len(l[i]) + if len_li == 4: + # -0300 + x.time = (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + x.time = int(l[i]) * 3600 + int(l[i + 2]) * 60 + used_idxs.append(i) + i += 2 + if i + 1 < len_l and l[i + 1] == ':': + used_idxs.append(i) + i += 2 + x.time += int(l[i]) + elif len_li <= 2: + # -[0]3 + x.time = (int(l[i][:2]) * 3600) + else: + return None + used_idxs.append(i) + i += 1 + + assert i == len_l or l[i] == ',' + + i += 1 + + assert i >= len_l + + except (IndexError, ValueError, AssertionError): + return None + + unused_idxs = set(range(len_l)).difference(used_idxs) + res.any_unused_tokens = not {l[n] for n in unused_idxs}.issubset({",",":"}) + return res + + +DEFAULTTZPARSER = _tzparser() + + +def _parsetz(tzstr): + return DEFAULTTZPARSER.parse(tzstr) + +class UnknownTimezoneWarning(RuntimeWarning): + """Raised when the parser finds a timezone it cannot parse into a tzinfo""" +# vim:ts=4:sw=4:et diff --git a/py3/lib/python3.6/site-packages/dateutil/parser/isoparser.py b/py3/lib/python3.6/site-packages/dateutil/parser/isoparser.py new file mode 100644 index 0000000..e3cf6d8 --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/parser/isoparser.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +""" +This module offers a parser for ISO-8601 strings + +It is intended to support all valid date, time and datetime formats per the +ISO-8601 specification. + +..versionadded:: 2.7.0 +""" +from datetime import datetime, timedelta, time, date +import calendar +from dateutil import tz + +from functools import wraps + +import re +import six + +__all__ = ["isoparse", "isoparser"] + + +def _takes_ascii(f): + @wraps(f) + def func(self, str_in, *args, **kwargs): + # If it's a stream, read the whole thing + str_in = getattr(str_in, 'read', lambda: str_in)() + + # If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII + if isinstance(str_in, six.text_type): + # ASCII is the same in UTF-8 + try: + str_in = str_in.encode('ascii') + except UnicodeEncodeError as e: + msg = 'ISO-8601 strings should contain only ASCII characters' + six.raise_from(ValueError(msg), e) + + return f(self, str_in, *args, **kwargs) + + return func + + +class isoparser(object): + def __init__(self, sep=None): + """ + :param sep: + A single character that separates date and time portions. If + ``None``, the parser will accept any single character. + For strict ISO-8601 adherence, pass ``'T'``. 
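+
+        A brief sketch of how ``sep`` is used (an illustrative addition,
+        not part of the upstream docstring):
+
+            >>> from dateutil.parser import isoparser
+            >>> isoparser(sep='T').isoparse('2018-04-09T13:37')
+            datetime.datetime(2018, 4, 9, 13, 37)
+            >>> isoparser(sep=' ').isoparse('2018-04-09 13:37')
+            datetime.datetime(2018, 4, 9, 13, 37)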
+ """ + if sep is not None: + if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'): + raise ValueError('Separator must be a single, non-numeric ' + + 'ASCII character') + + sep = sep.encode('ascii') + + self._sep = sep + + @_takes_ascii + def isoparse(self, dt_str): + """ + Parse an ISO-8601 datetime string into a :class:`datetime.datetime`. + + An ISO-8601 datetime string consists of a date portion, followed + optionally by a time portion - the date and time portions are separated + by a single character separator, which is ``T`` in the official + standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be + combined with a time portion. + + Supported date formats are: + + Common: + + - ``YYYY`` + - ``YYYY-MM`` or ``YYYYMM`` + - ``YYYY-MM-DD`` or ``YYYYMMDD`` + + Uncommon: + + - ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0) + - ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day + + The ISO week and day numbering follows the same logic as + :func:`datetime.date.isocalendar`. + + Supported time formats are: + + - ``hh`` + - ``hh:mm`` or ``hhmm`` + - ``hh:mm:ss`` or ``hhmmss`` + - ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits) + + Midnight is a special case for `hh`, as the standard supports both + 00:00 and 24:00 as a representation. The decimal separator can be + either a dot or a comma. + + + .. caution:: + + Support for fractional components other than seconds is part of the + ISO-8601 standard, but is not currently implemented in this parser. + + Supported time zone offset formats are: + + - `Z` (UTC) + - `±HH:MM` + - `±HHMM` + - `±HH` + + Offsets will be represented as :class:`dateutil.tz.tzoffset` objects, + with the exception of UTC, which will be represented as + :class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such + as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`. + + :param dt_str: + A string or stream containing only an ISO-8601 datetime string + + :return: + Returns a :class:`datetime.datetime` representing the string. + Unspecified components default to their lowest value. + + .. warning:: + + As of version 2.7.0, the strictness of the parser should not be + considered a stable part of the contract. Any valid ISO-8601 string + that parses correctly with the default settings will continue to + parse correctly in future versions, but invalid strings that + currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not + guaranteed to continue failing in future versions if they encode + a valid date. + + .. versionadded:: 2.7.0 + """ + components, pos = self._parse_isodate(dt_str) + + if len(dt_str) > pos: + if self._sep is None or dt_str[pos:pos + 1] == self._sep: + components += self._parse_isotime(dt_str[pos + 1:]) + else: + raise ValueError('String contains unknown ISO components') + + if len(components) > 3 and components[3] == 24: + components[3] = 0 + return datetime(*components) + timedelta(days=1) + + return datetime(*components) + + @_takes_ascii + def parse_isodate(self, datestr): + """ + Parse the date portion of an ISO string. + + :param datestr: + The string portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.date` object + """ + components, pos = self._parse_isodate(datestr) + if pos < len(datestr): + raise ValueError('String contains unknown ISO ' + + 'components: {}'.format(datestr)) + return date(*components) + + @_takes_ascii + def parse_isotime(self, timestr): + """ + Parse the time portion of an ISO string. 
+ + :param timestr: + The time portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.time` object + """ + components = self._parse_isotime(timestr) + if components[0] == 24: + components[0] = 0 + return time(*components) + + @_takes_ascii + def parse_tzstr(self, tzstr, zero_as_utc=True): + """ + Parse a valid ISO time zone string. + + See :func:`isoparser.isoparse` for details on supported formats. + + :param tzstr: + A string representing an ISO time zone offset + + :param zero_as_utc: + Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones + + :return: + Returns :class:`dateutil.tz.tzoffset` for offsets and + :class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is + specified) offsets equivalent to UTC. + """ + return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc) + + # Constants + _DATE_SEP = b'-' + _TIME_SEP = b':' + _FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)') + + def _parse_isodate(self, dt_str): + try: + return self._parse_isodate_common(dt_str) + except ValueError: + return self._parse_isodate_uncommon(dt_str) + + def _parse_isodate_common(self, dt_str): + len_str = len(dt_str) + components = [1, 1, 1] + + if len_str < 4: + raise ValueError('ISO string too short') + + # Year + components[0] = int(dt_str[0:4]) + pos = 4 + if pos >= len_str: + return components, pos + + has_sep = dt_str[pos:pos + 1] == self._DATE_SEP + if has_sep: + pos += 1 + + # Month + if len_str - pos < 2: + raise ValueError('Invalid common month') + + components[1] = int(dt_str[pos:pos + 2]) + pos += 2 + + if pos >= len_str: + if has_sep: + return components, pos + else: + raise ValueError('Invalid ISO format') + + if has_sep: + if dt_str[pos:pos + 1] != self._DATE_SEP: + raise ValueError('Invalid separator in ISO string') + pos += 1 + + # Day + if len_str - pos < 2: + raise ValueError('Invalid common day') + components[2] = int(dt_str[pos:pos + 2]) + return components, pos + 2 + + def _parse_isodate_uncommon(self, dt_str): + if len(dt_str) < 4: + raise ValueError('ISO string too short') + + # All ISO formats start with the year + year = int(dt_str[0:4]) + + has_sep = dt_str[4:5] == self._DATE_SEP + + pos = 4 + has_sep # Skip '-' if it's there + if dt_str[pos:pos + 1] == b'W': + # YYYY-?Www-?D? + pos += 1 + weekno = int(dt_str[pos:pos + 2]) + pos += 2 + + dayno = 1 + if len(dt_str) > pos: + if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep: + raise ValueError('Inconsistent use of dash separator') + + pos += has_sep + + dayno = int(dt_str[pos:pos + 1]) + pos += 1 + + base_date = self._calculate_weekdate(year, weekno, dayno) + else: + # YYYYDDD or YYYY-DDD + if len(dt_str) - pos < 3: + raise ValueError('Invalid ordinal day') + + ordinal_day = int(dt_str[pos:pos + 3]) + pos += 3 + + if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)): + raise ValueError('Invalid ordinal day' + + ' {} for year {}'.format(ordinal_day, year)) + + base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1) + + components = [base_date.year, base_date.month, base_date.day] + return components, pos + + def _calculate_weekdate(self, year, week, day): + """ + Calculate the day of corresponding to the ISO year-week-day calendar. + + This function is effectively the inverse of + :func:`datetime.date.isocalendar`. 
+ + :param year: + The year in the ISO calendar + + :param week: + The week in the ISO calendar - range is [1, 53] + + :param day: + The day in the ISO calendar - range is [1 (MON), 7 (SUN)] + + :return: + Returns a :class:`datetime.date` + """ + if not 0 < week < 54: + raise ValueError('Invalid week: {}'.format(week)) + + if not 0 < day < 8: # Range is 1-7 + raise ValueError('Invalid weekday: {}'.format(day)) + + # Get week 1 for the specific year: + jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it + week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1) + + # Now add the specific number of weeks and days to get what we want + week_offset = (week - 1) * 7 + (day - 1) + return week_1 + timedelta(days=week_offset) + + def _parse_isotime(self, timestr): + len_str = len(timestr) + components = [0, 0, 0, 0, None] + pos = 0 + comp = -1 + + if len(timestr) < 2: + raise ValueError('ISO time too short') + + has_sep = len_str >= 3 and timestr[2:3] == self._TIME_SEP + + while pos < len_str and comp < 5: + comp += 1 + + if timestr[pos:pos + 1] in b'-+Zz': + # Detect time zone boundary + components[-1] = self._parse_tzstr(timestr[pos:]) + pos = len_str + break + + if comp < 3: + # Hour, minute, second + components[comp] = int(timestr[pos:pos + 2]) + pos += 2 + if (has_sep and pos < len_str and + timestr[pos:pos + 1] == self._TIME_SEP): + pos += 1 + + if comp == 3: + # Fraction of a second + frac = self._FRACTION_REGEX.match(timestr[pos:]) + if not frac: + continue + + us_str = frac.group(1)[:6] # Truncate to microseconds + components[comp] = int(us_str) * 10**(6 - len(us_str)) + pos += len(frac.group()) + + if pos < len_str: + raise ValueError('Unused components in ISO string') + + if components[0] == 24: + # Standard supports 00:00 and 24:00 as representations of midnight + if any(component != 0 for component in components[1:4]): + raise ValueError('Hour may only be 24 at 24:00:00.000') + + return components + + def _parse_tzstr(self, tzstr, zero_as_utc=True): + if tzstr == b'Z' or tzstr == b'z': + return tz.tzutc() + + if len(tzstr) not in {3, 5, 6}: + raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters') + + if tzstr[0:1] == b'-': + mult = -1 + elif tzstr[0:1] == b'+': + mult = 1 + else: + raise ValueError('Time zone offset requires sign') + + hours = int(tzstr[1:3]) + if len(tzstr) == 3: + minutes = 0 + else: + minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP else 3):]) + + if zero_as_utc and hours == 0 and minutes == 0: + return tz.tzutc() + else: + if minutes > 59: + raise ValueError('Invalid minutes in time zone offset') + + if hours > 23: + raise ValueError('Invalid hours in time zone offset') + + return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60) + + +DEFAULT_ISOPARSER = isoparser() +isoparse = DEFAULT_ISOPARSER.isoparse diff --git a/py3/lib/python3.6/site-packages/dateutil/relativedelta.py b/py3/lib/python3.6/site-packages/dateutil/relativedelta.py new file mode 100644 index 0000000..c65c66e --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/relativedelta.py @@ -0,0 +1,599 @@ +# -*- coding: utf-8 -*- +import datetime +import calendar + +import operator +from math import copysign + +from six import integer_types +from warnings import warn + +from ._common import weekday + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + +__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + + +class relativedelta(object): + """ + The relativedelta type is designed to be applied to an existing datetime 
and + can replace specific components of that datetime, or represents an interval + of time. + + It is based on the specification of the excellent work done by M.-A. Lemburg + in his + `mx.DateTime `_ extension. + However, notice that this type does *NOT* implement the same algorithm as + his work. Do *NOT* expect it to behave like mx.DateTime's counterpart. + + There are two different ways to build a relativedelta instance. The + first one is passing it two date/datetime classes:: + + relativedelta(datetime1, datetime2) + + The second one is passing it any number of the following keyword arguments:: + + relativedelta(arg1=x,arg2=y,arg3=z...) + + year, month, day, hour, minute, second, microsecond: + Absolute information (argument is singular); adding or subtracting a + relativedelta with absolute information does not perform an arithmetic + operation, but rather REPLACES the corresponding value in the + original datetime with the value(s) in relativedelta. + + years, months, weeks, days, hours, minutes, seconds, microseconds: + Relative information, may be negative (argument is plural); adding + or subtracting a relativedelta with relative information performs + the corresponding aritmetic operation on the original datetime value + with the information in the relativedelta. + + weekday: + One of the weekday instances (MO, TU, etc) available in the + relativedelta module. These instances may receive a parameter N, + specifying the Nth weekday, which could be positive or negative + (like MO(+1) or MO(-2)). Not specifying it is the same as specifying + +1. You can also use an integer, where 0=MO. This argument is always + relative e.g. if the calculated date is already Monday, using MO(1) + or MO(-1) won't change the day. To effectively make it absolute, use + it in combination with the day argument (e.g. day=1, MO(1) for first + Monday of the month). + + leapdays: + Will add given days to the date found, if year is a leap + year, and the date found is post 28 of february. + + yearday, nlyearday: + Set the yearday or the non-leap year day (jump leap days). + These are converted to day/month/leapdays information. + + There are relative and absolute forms of the keyword + arguments. The plural is relative, and the singular is + absolute. For each argument in the order below, the absolute form + is applied first (by setting each attribute to that value) and + then the relative form (by adding the value to the attribute). + + The order of attributes considered when this relativedelta is + added to a datetime is: + + 1. Year + 2. Month + 3. Day + 4. Hours + 5. Minutes + 6. Seconds + 7. Microseconds + + Finally, weekday is applied, using the rule described above. + + For example + + >>> from datetime import datetime + >>> from dateutil.relativedelta import relativedelta, MO + >>> dt = datetime(2018, 4, 9, 13, 37, 0) + >>> delta = relativedelta(hours=25, day=1, weekday=MO(1)) + >>> dt + delta + datetime.datetime(2018, 4, 2, 14, 37) + + First, the day is set to 1 (the first of the month), then 25 hours + are added, to get to the 2nd day and 14th hour, finally the + weekday is applied, but since the 2nd is already a Monday there is + no effect. + + """ + + def __init__(self, dt1=None, dt2=None, + years=0, months=0, days=0, leapdays=0, weeks=0, + hours=0, minutes=0, seconds=0, microseconds=0, + year=None, month=None, day=None, weekday=None, + yearday=None, nlyearday=None, + hour=None, minute=None, second=None, microsecond=None): + + if dt1 and dt2: + # datetime is a subclass of date. 
So both must be date + if not (isinstance(dt1, datetime.date) and + isinstance(dt2, datetime.date)): + raise TypeError("relativedelta only diffs datetime/date") + + # We allow two dates, or two datetimes, so we coerce them to be + # of the same type + if (isinstance(dt1, datetime.datetime) != + isinstance(dt2, datetime.datetime)): + if not isinstance(dt1, datetime.datetime): + dt1 = datetime.datetime.fromordinal(dt1.toordinal()) + elif not isinstance(dt2, datetime.datetime): + dt2 = datetime.datetime.fromordinal(dt2.toordinal()) + + self.years = 0 + self.months = 0 + self.days = 0 + self.leapdays = 0 + self.hours = 0 + self.minutes = 0 + self.seconds = 0 + self.microseconds = 0 + self.year = None + self.month = None + self.day = None + self.weekday = None + self.hour = None + self.minute = None + self.second = None + self.microsecond = None + self._has_time = 0 + + # Get year / month delta between the two + months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month) + self._set_months(months) + + # Remove the year/month delta so the timedelta is just well-defined + # time units (seconds, days and microseconds) + dtm = self.__radd__(dt2) + + # If we've overshot our target, make an adjustment + if dt1 < dt2: + compare = operator.gt + increment = 1 + else: + compare = operator.lt + increment = -1 + + while compare(dt1, dtm): + months += increment + self._set_months(months) + dtm = self.__radd__(dt2) + + # Get the timedelta between the "months-adjusted" date and dt1 + delta = dt1 - dtm + self.seconds = delta.seconds + delta.days * 86400 + self.microseconds = delta.microseconds + else: + # Check for non-integer values in integer-only quantities + if any(x is not None and x != int(x) for x in (years, months)): + raise ValueError("Non-integer years and months are " + "ambiguous and not currently supported.") + + # Relative information + self.years = int(years) + self.months = int(months) + self.days = days + weeks * 7 + self.leapdays = leapdays + self.hours = hours + self.minutes = minutes + self.seconds = seconds + self.microseconds = microseconds + + # Absolute information + self.year = year + self.month = month + self.day = day + self.hour = hour + self.minute = minute + self.second = second + self.microsecond = microsecond + + if any(x is not None and int(x) != x + for x in (year, month, day, hour, + minute, second, microsecond)): + # For now we'll deprecate floats - later it'll be an error. + warn("Non-integer value passed as absolute information. 
" + + "This is not a well-defined condition and will raise " + + "errors in future versions.", DeprecationWarning) + + if isinstance(weekday, integer_types): + self.weekday = weekdays[weekday] + else: + self.weekday = weekday + + yday = 0 + if nlyearday: + yday = nlyearday + elif yearday: + yday = yearday + if yearday > 59: + self.leapdays = -1 + if yday: + ydayidx = [31, 59, 90, 120, 151, 181, 212, + 243, 273, 304, 334, 366] + for idx, ydays in enumerate(ydayidx): + if yday <= ydays: + self.month = idx+1 + if idx == 0: + self.day = yday + else: + self.day = yday-ydayidx[idx-1] + break + else: + raise ValueError("invalid year day (%d)" % yday) + + self._fix() + + def _fix(self): + if abs(self.microseconds) > 999999: + s = _sign(self.microseconds) + div, mod = divmod(self.microseconds * s, 1000000) + self.microseconds = mod * s + self.seconds += div * s + if abs(self.seconds) > 59: + s = _sign(self.seconds) + div, mod = divmod(self.seconds * s, 60) + self.seconds = mod * s + self.minutes += div * s + if abs(self.minutes) > 59: + s = _sign(self.minutes) + div, mod = divmod(self.minutes * s, 60) + self.minutes = mod * s + self.hours += div * s + if abs(self.hours) > 23: + s = _sign(self.hours) + div, mod = divmod(self.hours * s, 24) + self.hours = mod * s + self.days += div * s + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years += div * s + if (self.hours or self.minutes or self.seconds or self.microseconds + or self.hour is not None or self.minute is not None or + self.second is not None or self.microsecond is not None): + self._has_time = 1 + else: + self._has_time = 0 + + @property + def weeks(self): + return int(self.days / 7.0) + + @weeks.setter + def weeks(self, value): + self.days = self.days - (self.weeks * 7) + value * 7 + + def _set_months(self, months): + self.months = months + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years = div * s + else: + self.years = 0 + + def normalized(self): + """ + Return a version of this object represented entirely using integer + values for the relative attributes. + + >>> relativedelta(days=1.5, hours=2).normalized() + relativedelta(days=+1, hours=+14) + + :return: + Returns a :class:`dateutil.relativedelta.relativedelta` object. 
+ """ + # Cascade remainders down (rounding each to roughly nearest microsecond) + days = int(self.days) + + hours_f = round(self.hours + 24 * (self.days - days), 11) + hours = int(hours_f) + + minutes_f = round(self.minutes + 60 * (hours_f - hours), 10) + minutes = int(minutes_f) + + seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8) + seconds = int(seconds_f) + + microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds)) + + # Constructor carries overflow back up with call to _fix() + return self.__class__(years=self.years, months=self.months, + days=days, hours=hours, minutes=minutes, + seconds=seconds, microseconds=microseconds, + leapdays=self.leapdays, year=self.year, + month=self.month, day=self.day, + weekday=self.weekday, hour=self.hour, + minute=self.minute, second=self.second, + microsecond=self.microsecond) + + def __add__(self, other): + if isinstance(other, relativedelta): + return self.__class__(years=other.years + self.years, + months=other.months + self.months, + days=other.days + self.days, + hours=other.hours + self.hours, + minutes=other.minutes + self.minutes, + seconds=other.seconds + self.seconds, + microseconds=(other.microseconds + + self.microseconds), + leapdays=other.leapdays or self.leapdays, + year=(other.year if other.year is not None + else self.year), + month=(other.month if other.month is not None + else self.month), + day=(other.day if other.day is not None + else self.day), + weekday=(other.weekday if other.weekday is not None + else self.weekday), + hour=(other.hour if other.hour is not None + else self.hour), + minute=(other.minute if other.minute is not None + else self.minute), + second=(other.second if other.second is not None + else self.second), + microsecond=(other.microsecond if other.microsecond + is not None else + self.microsecond)) + if isinstance(other, datetime.timedelta): + return self.__class__(years=self.years, + months=self.months, + days=self.days + other.days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds + other.seconds, + microseconds=self.microseconds + other.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + if not isinstance(other, datetime.date): + return NotImplemented + elif self._has_time and not isinstance(other, datetime.datetime): + other = datetime.datetime.fromordinal(other.toordinal()) + year = (self.year or other.year)+self.years + month = self.month or other.month + if self.months: + assert 1 <= abs(self.months) <= 12 + month += self.months + if month > 12: + year += 1 + month -= 12 + elif month < 1: + year -= 1 + month += 12 + day = min(calendar.monthrange(year, month)[1], + self.day or other.day) + repl = {"year": year, "month": month, "day": day} + for attr in ["hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + repl[attr] = value + days = self.days + if self.leapdays and month > 2 and calendar.isleap(year): + days += self.leapdays + ret = (other.replace(**repl) + + datetime.timedelta(days=days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds, + microseconds=self.microseconds)) + if self.weekday: + weekday, nth = self.weekday.weekday, self.weekday.n or 1 + jumpdays = (abs(nth) - 1) * 7 + if nth > 0: + jumpdays += (7 - ret.weekday() + weekday) % 7 + else: + jumpdays += (ret.weekday() - weekday) % 7 + jumpdays *= -1 + ret += 
datetime.timedelta(days=jumpdays) + return ret + + def __radd__(self, other): + return self.__add__(other) + + def __rsub__(self, other): + return self.__neg__().__radd__(other) + + def __sub__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented # In case the other object defines __rsub__ + return self.__class__(years=self.years - other.years, + months=self.months - other.months, + days=self.days - other.days, + hours=self.hours - other.hours, + minutes=self.minutes - other.minutes, + seconds=self.seconds - other.seconds, + microseconds=self.microseconds - other.microseconds, + leapdays=self.leapdays or other.leapdays, + year=(self.year if self.year is not None + else other.year), + month=(self.month if self.month is not None else + other.month), + day=(self.day if self.day is not None else + other.day), + weekday=(self.weekday if self.weekday is not None else + other.weekday), + hour=(self.hour if self.hour is not None else + other.hour), + minute=(self.minute if self.minute is not None else + other.minute), + second=(self.second if self.second is not None else + other.second), + microsecond=(self.microsecond if self.microsecond + is not None else + other.microsecond)) + + def __abs__(self): + return self.__class__(years=abs(self.years), + months=abs(self.months), + days=abs(self.days), + hours=abs(self.hours), + minutes=abs(self.minutes), + seconds=abs(self.seconds), + microseconds=abs(self.microseconds), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __neg__(self): + return self.__class__(years=-self.years, + months=-self.months, + days=-self.days, + hours=-self.hours, + minutes=-self.minutes, + seconds=-self.seconds, + microseconds=-self.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __bool__(self): + return not (not self.years and + not self.months and + not self.days and + not self.hours and + not self.minutes and + not self.seconds and + not self.microseconds and + not self.leapdays and + self.year is None and + self.month is None and + self.day is None and + self.weekday is None and + self.hour is None and + self.minute is None and + self.second is None and + self.microsecond is None) + # Compatibility with Python 2.x + __nonzero__ = __bool__ + + def __mul__(self, other): + try: + f = float(other) + except TypeError: + return NotImplemented + + return self.__class__(years=int(self.years * f), + months=int(self.months * f), + days=int(self.days * f), + hours=int(self.hours * f), + minutes=int(self.minutes * f), + seconds=int(self.seconds * f), + microseconds=int(self.microseconds * f), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + __rmul__ = __mul__ + + def __eq__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented + if self.weekday or other.weekday: + if not self.weekday or not other.weekday: + return False + if self.weekday.weekday != other.weekday.weekday: + return False + n1, n2 = self.weekday.n, other.weekday.n + if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)): + return False + return (self.years == other.years and + 
self.months == other.months and + self.days == other.days and + self.hours == other.hours and + self.minutes == other.minutes and + self.seconds == other.seconds and + self.microseconds == other.microseconds and + self.leapdays == other.leapdays and + self.year == other.year and + self.month == other.month and + self.day == other.day and + self.hour == other.hour and + self.minute == other.minute and + self.second == other.second and + self.microsecond == other.microsecond) + + def __hash__(self): + return hash(( + self.weekday, + self.years, + self.months, + self.days, + self.hours, + self.minutes, + self.seconds, + self.microseconds, + self.leapdays, + self.year, + self.month, + self.day, + self.hour, + self.minute, + self.second, + self.microsecond, + )) + + def __ne__(self, other): + return not self.__eq__(other) + + def __div__(self, other): + try: + reciprocal = 1 / float(other) + except TypeError: + return NotImplemented + + return self.__mul__(reciprocal) + + __truediv__ = __div__ + + def __repr__(self): + l = [] + for attr in ["years", "months", "days", "leapdays", + "hours", "minutes", "seconds", "microseconds"]: + value = getattr(self, attr) + if value: + l.append("{attr}={value:+g}".format(attr=attr, value=value)) + for attr in ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + l.append("{attr}={value}".format(attr=attr, value=repr(value))) + return "{classname}({attrs})".format(classname=self.__class__.__name__, + attrs=", ".join(l)) + + +def _sign(x): + return int(copysign(1, x)) + +# vim:ts=4:sw=4:et diff --git a/py3/lib/python3.6/site-packages/dateutil/rrule.py b/py3/lib/python3.6/site-packages/dateutil/rrule.py new file mode 100644 index 0000000..20a0c4a --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/rrule.py @@ -0,0 +1,1736 @@ +# -*- coding: utf-8 -*- +""" +The rrule module offers a small, complete, and very fast, implementation of +the recurrence rules documented in the +`iCalendar RFC `_, +including support for caching of results. +""" +import itertools +import datetime +import calendar +import re +import sys + +try: + from math import gcd +except ImportError: + from fractions import gcd + +from six import advance_iterator, integer_types +from six.moves import _thread, range +import heapq + +from ._common import weekday as weekdaybase +from .tz import tzutc, tzlocal + +# For warning about deprecation of until and count +from warnings import warn + +__all__ = ["rrule", "rruleset", "rrulestr", + "YEARLY", "MONTHLY", "WEEKLY", "DAILY", + "HOURLY", "MINUTELY", "SECONDLY", + "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + +# Every mask is 7 days longer to handle cross-year weekly periods. 
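+# Roughly: for each day-of-year in a leap year, M366MASK gives its month
+# number, MDAY366MASK its day-of-month, and NMDAY366MASK its day-of-month
+# counted back from the end of the month; the *365* variants drop Feb 29,
+# M366RANGE/M365RANGE give the cumulative day count at each month boundary,
+# and WDAYMASK is a repeating Mon..Sun (0..6) cycle.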
+M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 + + [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7) +M365MASK = list(M366MASK) +M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32)) +MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +MDAY365MASK = list(MDAY366MASK) +M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0)) +NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +NMDAY365MASK = list(NMDAY366MASK) +M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366) +M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365) +WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55 +del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31] +MDAY365MASK = tuple(MDAY365MASK) +M365MASK = tuple(M365MASK) + +FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY'] + +(YEARLY, + MONTHLY, + WEEKLY, + DAILY, + HOURLY, + MINUTELY, + SECONDLY) = list(range(7)) + +# Imported on demand. +easter = None +parser = None + + +class weekday(weekdaybase): + """ + This version of weekday does not allow n = 0. + """ + def __init__(self, wkday, n=None): + if n == 0: + raise ValueError("Can't create weekday with n==0") + + super(weekday, self).__init__(wkday, n) + + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + + +def _invalidates_cache(f): + """ + Decorator for rruleset methods which may invalidate the + cached length. + """ + def inner_func(self, *args, **kwargs): + rv = f(self, *args, **kwargs) + self._invalidate_cache() + return rv + + return inner_func + + +class rrulebase(object): + def __init__(self, cache=False): + if cache: + self._cache = [] + self._cache_lock = _thread.allocate_lock() + self._invalidate_cache() + else: + self._cache = None + self._cache_complete = False + self._len = None + + def __iter__(self): + if self._cache_complete: + return iter(self._cache) + elif self._cache is None: + return self._iter() + else: + return self._iter_cached() + + def _invalidate_cache(self): + if self._cache is not None: + self._cache = [] + self._cache_complete = False + self._cache_gen = self._iter() + + if self._cache_lock.locked(): + self._cache_lock.release() + + self._len = None + + def _iter_cached(self): + i = 0 + gen = self._cache_gen + cache = self._cache + acquire = self._cache_lock.acquire + release = self._cache_lock.release + while gen: + if i == len(cache): + acquire() + if self._cache_complete: + break + try: + for j in range(10): + cache.append(advance_iterator(gen)) + except StopIteration: + self._cache_gen = gen = None + self._cache_complete = True + break + release() + yield cache[i] + i += 1 + while i < self._len: + yield cache[i] + i += 1 + + def __getitem__(self, item): + if self._cache_complete: + return self._cache[item] + elif isinstance(item, slice): + if item.step and item.step < 0: + return list(iter(self))[item] + else: + return list(itertools.islice(self, + item.start or 0, + item.stop or sys.maxsize, + item.step or 1)) + elif item >= 0: + gen = iter(self) + try: + for i in range(item+1): + res = advance_iterator(gen) + except StopIteration: + raise IndexError + return res + else: + return list(iter(self))[item] + + def __contains__(self, item): + if self._cache_complete: + return item in self._cache + else: + for i in self: + if i == item: + return True + elif i > item: + return False + return False + + # __len__() introduces a large performance penality. 
+ def count(self): + """ Returns the number of recurrences in this set. It will have go + trough the whole recurrence, if this hasn't been done before. """ + if self._len is None: + for x in self: + pass + return self._len + + def before(self, dt, inc=False): + """ Returns the last recurrence before the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + last = None + if inc: + for i in gen: + if i > dt: + break + last = i + else: + for i in gen: + if i >= dt: + break + last = i + return last + + def after(self, dt, inc=False): + """ Returns the first recurrence after the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + if inc: + for i in gen: + if i >= dt: + return i + else: + for i in gen: + if i > dt: + return i + return None + + def xafter(self, dt, count=None, inc=False): + """ + Generator which yields up to `count` recurrences after the given + datetime instance, equivalent to `after`. + + :param dt: + The datetime at which to start generating recurrences. + + :param count: + The maximum number of recurrences to generate. If `None` (default), + dates are generated until the recurrence rule is exhausted. + + :param inc: + If `dt` is an instance of the rule and `inc` is `True`, it is + included in the output. + + :yields: Yields a sequence of `datetime` objects. + """ + + if self._cache_complete: + gen = self._cache + else: + gen = self + + # Select the comparison function + if inc: + comp = lambda dc, dtc: dc >= dtc + else: + comp = lambda dc, dtc: dc > dtc + + # Generate dates + n = 0 + for d in gen: + if comp(d, dt): + if count is not None: + n += 1 + if n > count: + break + + yield d + + def between(self, after, before, inc=False, count=1): + """ Returns all the occurrences of the rrule between after and before. + The inc keyword defines what happens if after and/or before are + themselves occurrences. With inc=True, they will be included in the + list, if they are found in the recurrence set. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + started = False + l = [] + if inc: + for i in gen: + if i > before: + break + elif not started: + if i >= after: + started = True + l.append(i) + else: + l.append(i) + else: + for i in gen: + if i >= before: + break + elif not started: + if i > after: + started = True + l.append(i) + else: + l.append(i) + return l + + +class rrule(rrulebase): + """ + That's the base of the rrule operation. It accepts all the keywords + defined in the RFC as its constructor parameters (except byday, + which was renamed to byweekday) and more. The constructor prototype is:: + + rrule(freq) + + Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, + or SECONDLY. + + .. note:: + Per RFC section 3.3.10, recurrence instances falling on invalid dates + and times are ignored rather than coerced: + + Recurrence rules may generate recurrence instances with an invalid + date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM + on a day where the local time is moved forward by an hour at 1:00 + AM). Such recurrence instances MUST be ignored and MUST NOT be + counted as part of the recurrence set. 
+ + This can lead to possibly surprising behavior when, for example, the + start date occurs at the end of the month: + + >>> from dateutil.rrule import rrule, MONTHLY + >>> from datetime import datetime + >>> start_date = datetime(2014, 12, 31) + >>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date)) + ... # doctest: +NORMALIZE_WHITESPACE + [datetime.datetime(2014, 12, 31, 0, 0), + datetime.datetime(2015, 1, 31, 0, 0), + datetime.datetime(2015, 3, 31, 0, 0), + datetime.datetime(2015, 5, 31, 0, 0)] + + Additionally, it supports the following keyword arguments: + + :param dtstart: + The recurrence start. Besides being the base for the recurrence, + missing parameters in the final recurrence instances will also be + extracted from this date. If not given, datetime.now() will be used + instead. + :param interval: + The interval between each freq iteration. For example, when using + YEARLY, an interval of 2 means once every two years, but with HOURLY, + it means once every two hours. The default interval is 1. + :param wkst: + The week start day. Must be one of the MO, TU, WE constants, or an + integer, specifying the first day of the week. This will affect + recurrences based on weekly periods. The default week start is got + from calendar.firstweekday(), and may be modified by + calendar.setfirstweekday(). + :param count: + If given, this determines how many occurrences will be generated. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param until: + If given, this must be a datetime instance specifying the upper-bound + limit of the recurrence. The last recurrence in the rule is the greatest + datetime that is less than or equal to the value specified in the + ``until`` parameter. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param bysetpos: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each given integer will specify an occurrence + number, corresponding to the nth occurrence of the rule inside the + frequency period. For example, a bysetpos of -1 if combined with a + MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will + result in the last work day of every month. + :param bymonth: + If given, it must be either an integer, or a sequence of integers, + meaning the months to apply the recurrence to. + :param bymonthday: + If given, it must be either an integer, or a sequence of integers, + meaning the month days to apply the recurrence to. + :param byyearday: + If given, it must be either an integer, or a sequence of integers, + meaning the year days to apply the recurrence to. + :param byeaster: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each integer will define an offset from the + Easter Sunday. Passing the offset 0 to byeaster will yield the Easter + Sunday itself. This is an extension to the RFC specification. + :param byweekno: + If given, it must be either an integer, or a sequence of integers, + meaning the week numbers to apply the recurrence to. 
Week numbers + have the meaning described in ISO8601, that is, the first week of + the year is that containing at least four days of the new year. + :param byweekday: + If given, it must be either an integer (0 == MO), a sequence of + integers, one of the weekday constants (MO, TU, etc), or a sequence + of these constants. When given, these variables will define the + weekdays where the recurrence will be applied. It's also possible to + use an argument n for the weekday instances, which will mean the nth + occurrence of this weekday in the period. For example, with MONTHLY, + or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the + first friday of the month where the recurrence happens. Notice that in + the RFC documentation, this is specified as BYDAY, but was renamed to + avoid the ambiguity of that keyword. + :param byhour: + If given, it must be either an integer, or a sequence of integers, + meaning the hours to apply the recurrence to. + :param byminute: + If given, it must be either an integer, or a sequence of integers, + meaning the minutes to apply the recurrence to. + :param bysecond: + If given, it must be either an integer, or a sequence of integers, + meaning the seconds to apply the recurrence to. + :param cache: + If given, it must be a boolean value specifying to enable or disable + caching of results. If you will use the same rrule instance multiple + times, enabling caching will improve the performance considerably. + """ + def __init__(self, freq, dtstart=None, + interval=1, wkst=None, count=None, until=None, bysetpos=None, + bymonth=None, bymonthday=None, byyearday=None, byeaster=None, + byweekno=None, byweekday=None, + byhour=None, byminute=None, bysecond=None, + cache=False): + super(rrule, self).__init__(cache) + global easter + if not dtstart: + if until and until.tzinfo: + dtstart = datetime.datetime.now(tz=until.tzinfo).replace(microsecond=0) + else: + dtstart = datetime.datetime.now().replace(microsecond=0) + elif not isinstance(dtstart, datetime.datetime): + dtstart = datetime.datetime.fromordinal(dtstart.toordinal()) + else: + dtstart = dtstart.replace(microsecond=0) + self._dtstart = dtstart + self._tzinfo = dtstart.tzinfo + self._freq = freq + self._interval = interval + self._count = count + + # Cache the original byxxx rules, if they are provided, as the _byxxx + # attributes do not necessarily map to the inputs, and this can be + # a problem in generating the strings. Only store things if they've + # been supplied (the string retrieval will just use .get()) + self._original_rule = {} + + if until and not isinstance(until, datetime.datetime): + until = datetime.datetime.fromordinal(until.toordinal()) + self._until = until + + if self._dtstart and self._until: + if (self._dtstart.tzinfo is not None) != (self._until.tzinfo is not None): + # According to RFC5545 Section 3.3.10: + # https://tools.ietf.org/html/rfc5545#section-3.3.10 + # + # > If the "DTSTART" property is specified as a date with UTC + # > time or a date with local time and time zone reference, + # > then the UNTIL rule part MUST be specified as a date with + # > UTC time. + raise ValueError( + 'RRULE UNTIL values must be specified in UTC when DTSTART ' + 'is timezone-aware' + ) + + if count is not None and until: + warn("Using both 'count' and 'until' is inconsistent with RFC 5545" + " and has been deprecated in dateutil. 
Future versions will " + "raise an error.", DeprecationWarning) + + if wkst is None: + self._wkst = calendar.firstweekday() + elif isinstance(wkst, integer_types): + self._wkst = wkst + else: + self._wkst = wkst.weekday + + if bysetpos is None: + self._bysetpos = None + elif isinstance(bysetpos, integer_types): + if bysetpos == 0 or not (-366 <= bysetpos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + self._bysetpos = (bysetpos,) + else: + self._bysetpos = tuple(bysetpos) + for pos in self._bysetpos: + if pos == 0 or not (-366 <= pos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + + if self._bysetpos: + self._original_rule['bysetpos'] = self._bysetpos + + if (byweekno is None and byyearday is None and bymonthday is None and + byweekday is None and byeaster is None): + if freq == YEARLY: + if bymonth is None: + bymonth = dtstart.month + self._original_rule['bymonth'] = None + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == MONTHLY: + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == WEEKLY: + byweekday = dtstart.weekday() + self._original_rule['byweekday'] = None + + # bymonth + if bymonth is None: + self._bymonth = None + else: + if isinstance(bymonth, integer_types): + bymonth = (bymonth,) + + self._bymonth = tuple(sorted(set(bymonth))) + + if 'bymonth' not in self._original_rule: + self._original_rule['bymonth'] = self._bymonth + + # byyearday + if byyearday is None: + self._byyearday = None + else: + if isinstance(byyearday, integer_types): + byyearday = (byyearday,) + + self._byyearday = tuple(sorted(set(byyearday))) + self._original_rule['byyearday'] = self._byyearday + + # byeaster + if byeaster is not None: + if not easter: + from dateutil import easter + if isinstance(byeaster, integer_types): + self._byeaster = (byeaster,) + else: + self._byeaster = tuple(sorted(byeaster)) + + self._original_rule['byeaster'] = self._byeaster + else: + self._byeaster = None + + # bymonthday + if bymonthday is None: + self._bymonthday = () + self._bynmonthday = () + else: + if isinstance(bymonthday, integer_types): + bymonthday = (bymonthday,) + + bymonthday = set(bymonthday) # Ensure it's unique + + self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0)) + self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0)) + + # Storing positive numbers first, then negative numbers + if 'bymonthday' not in self._original_rule: + self._original_rule['bymonthday'] = tuple( + itertools.chain(self._bymonthday, self._bynmonthday)) + + # byweekno + if byweekno is None: + self._byweekno = None + else: + if isinstance(byweekno, integer_types): + byweekno = (byweekno,) + + self._byweekno = tuple(sorted(set(byweekno))) + + self._original_rule['byweekno'] = self._byweekno + + # byweekday / bynweekday + if byweekday is None: + self._byweekday = None + self._bynweekday = None + else: + # If it's one of the valid non-sequence types, convert to a + # single-element sequence before the iterator that builds the + # byweekday set. 
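+            # weekday instances such as MO or TU(+2) carry an ``n`` attribute,
+            # which is what distinguishes them from plain integers here.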
+ if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"): + byweekday = (byweekday,) + + self._byweekday = set() + self._bynweekday = set() + for wday in byweekday: + if isinstance(wday, integer_types): + self._byweekday.add(wday) + elif not wday.n or freq > MONTHLY: + self._byweekday.add(wday.weekday) + else: + self._bynweekday.add((wday.weekday, wday.n)) + + if not self._byweekday: + self._byweekday = None + elif not self._bynweekday: + self._bynweekday = None + + if self._byweekday is not None: + self._byweekday = tuple(sorted(self._byweekday)) + orig_byweekday = [weekday(x) for x in self._byweekday] + else: + orig_byweekday = () + + if self._bynweekday is not None: + self._bynweekday = tuple(sorted(self._bynweekday)) + orig_bynweekday = [weekday(*x) for x in self._bynweekday] + else: + orig_bynweekday = () + + if 'byweekday' not in self._original_rule: + self._original_rule['byweekday'] = tuple(itertools.chain( + orig_byweekday, orig_bynweekday)) + + # byhour + if byhour is None: + if freq < HOURLY: + self._byhour = {dtstart.hour} + else: + self._byhour = None + else: + if isinstance(byhour, integer_types): + byhour = (byhour,) + + if freq == HOURLY: + self._byhour = self.__construct_byset(start=dtstart.hour, + byxxx=byhour, + base=24) + else: + self._byhour = set(byhour) + + self._byhour = tuple(sorted(self._byhour)) + self._original_rule['byhour'] = self._byhour + + # byminute + if byminute is None: + if freq < MINUTELY: + self._byminute = {dtstart.minute} + else: + self._byminute = None + else: + if isinstance(byminute, integer_types): + byminute = (byminute,) + + if freq == MINUTELY: + self._byminute = self.__construct_byset(start=dtstart.minute, + byxxx=byminute, + base=60) + else: + self._byminute = set(byminute) + + self._byminute = tuple(sorted(self._byminute)) + self._original_rule['byminute'] = self._byminute + + # bysecond + if bysecond is None: + if freq < SECONDLY: + self._bysecond = ((dtstart.second,)) + else: + self._bysecond = None + else: + if isinstance(bysecond, integer_types): + bysecond = (bysecond,) + + self._bysecond = set(bysecond) + + if freq == SECONDLY: + self._bysecond = self.__construct_byset(start=dtstart.second, + byxxx=bysecond, + base=60) + else: + self._bysecond = set(bysecond) + + self._bysecond = tuple(sorted(self._bysecond)) + self._original_rule['bysecond'] = self._bysecond + + if self._freq >= HOURLY: + self._timeset = None + else: + self._timeset = [] + for hour in self._byhour: + for minute in self._byminute: + for second in self._bysecond: + self._timeset.append( + datetime.time(hour, minute, second, + tzinfo=self._tzinfo)) + self._timeset.sort() + self._timeset = tuple(self._timeset) + + def __str__(self): + """ + Output a string that would generate this RRULE if passed to rrulestr. + This is mostly compatible with RFC5545, except for the + dateutil-specific extension BYEASTER. 
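+
+        For example (an illustrative sketch, assuming the default week start
+        from ``calendar.firstweekday()``):
+
+            >>> from dateutil.rrule import rrule, WEEKLY, MO, WE
+            >>> from datetime import datetime
+            >>> print(rrule(WEEKLY, byweekday=(MO, WE), dtstart=datetime(2020, 1, 6)))
+            DTSTART:20200106T000000
+            RRULE:FREQ=WEEKLY;BYDAY=MO,WE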
+ """ + + output = [] + h, m, s = [None] * 3 + if self._dtstart: + output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S')) + h, m, s = self._dtstart.timetuple()[3:6] + + parts = ['FREQ=' + FREQNAMES[self._freq]] + if self._interval != 1: + parts.append('INTERVAL=' + str(self._interval)) + + if self._wkst: + parts.append('WKST=' + repr(weekday(self._wkst))[0:2]) + + if self._count is not None: + parts.append('COUNT=' + str(self._count)) + + if self._until: + parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S')) + + if self._original_rule.get('byweekday') is not None: + # The str() method on weekday objects doesn't generate + # RFC5545-compliant strings, so we should modify that. + original_rule = dict(self._original_rule) + wday_strings = [] + for wday in original_rule['byweekday']: + if wday.n: + wday_strings.append('{n:+d}{wday}'.format( + n=wday.n, + wday=repr(wday)[0:2])) + else: + wday_strings.append(repr(wday)) + + original_rule['byweekday'] = wday_strings + else: + original_rule = self._original_rule + + partfmt = '{name}={vals}' + for name, key in [('BYSETPOS', 'bysetpos'), + ('BYMONTH', 'bymonth'), + ('BYMONTHDAY', 'bymonthday'), + ('BYYEARDAY', 'byyearday'), + ('BYWEEKNO', 'byweekno'), + ('BYDAY', 'byweekday'), + ('BYHOUR', 'byhour'), + ('BYMINUTE', 'byminute'), + ('BYSECOND', 'bysecond'), + ('BYEASTER', 'byeaster')]: + value = original_rule.get(key) + if value: + parts.append(partfmt.format(name=name, vals=(','.join(str(v) + for v in value)))) + + output.append('RRULE:' + ';'.join(parts)) + return '\n'.join(output) + + def replace(self, **kwargs): + """Return new rrule with same attributes except for those attributes given new + values by whichever keyword arguments are specified.""" + new_kwargs = {"interval": self._interval, + "count": self._count, + "dtstart": self._dtstart, + "freq": self._freq, + "until": self._until, + "wkst": self._wkst, + "cache": False if self._cache is None else True } + new_kwargs.update(self._original_rule) + new_kwargs.update(kwargs) + return rrule(**new_kwargs) + + def _iter(self): + year, month, day, hour, minute, second, weekday, yearday, _ = \ + self._dtstart.timetuple() + + # Some local variables to speed things up a bit + freq = self._freq + interval = self._interval + wkst = self._wkst + until = self._until + bymonth = self._bymonth + byweekno = self._byweekno + byyearday = self._byyearday + byweekday = self._byweekday + byeaster = self._byeaster + bymonthday = self._bymonthday + bynmonthday = self._bynmonthday + bysetpos = self._bysetpos + byhour = self._byhour + byminute = self._byminute + bysecond = self._bysecond + + ii = _iterinfo(self) + ii.rebuild(year, month) + + getdayset = {YEARLY: ii.ydayset, + MONTHLY: ii.mdayset, + WEEKLY: ii.wdayset, + DAILY: ii.ddayset, + HOURLY: ii.ddayset, + MINUTELY: ii.ddayset, + SECONDLY: ii.ddayset}[freq] + + if freq < HOURLY: + timeset = self._timeset + else: + gettimeset = {HOURLY: ii.htimeset, + MINUTELY: ii.mtimeset, + SECONDLY: ii.stimeset}[freq] + if ((freq >= HOURLY and + self._byhour and hour not in self._byhour) or + (freq >= MINUTELY and + self._byminute and minute not in self._byminute) or + (freq >= SECONDLY and + self._bysecond and second not in self._bysecond)): + timeset = () + else: + timeset = gettimeset(hour, minute, second) + + total = 0 + count = self._count + while True: + # Get dayset with the right frequency + dayset, start, end = getdayset(year, month, day) + + # Do the "hard" work ;-) + filtered = False + for i in dayset[start:end]: + if ((bymonth and ii.mmask[i] not in 
bymonth) or + (byweekno and not ii.wnomask[i]) or + (byweekday and ii.wdaymask[i] not in byweekday) or + (ii.nwdaymask and not ii.nwdaymask[i]) or + (byeaster and not ii.eastermask[i]) or + ((bymonthday or bynmonthday) and + ii.mdaymask[i] not in bymonthday and + ii.nmdaymask[i] not in bynmonthday) or + (byyearday and + ((i < ii.yearlen and i+1 not in byyearday and + -ii.yearlen+i not in byyearday) or + (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and + -ii.nextyearlen+i-ii.yearlen not in byyearday)))): + dayset[i] = None + filtered = True + + # Output results + if bysetpos and timeset: + poslist = [] + for pos in bysetpos: + if pos < 0: + daypos, timepos = divmod(pos, len(timeset)) + else: + daypos, timepos = divmod(pos-1, len(timeset)) + try: + i = [x for x in dayset[start:end] + if x is not None][daypos] + time = timeset[timepos] + except IndexError: + pass + else: + date = datetime.date.fromordinal(ii.yearordinal+i) + res = datetime.datetime.combine(date, time) + if res not in poslist: + poslist.append(res) + poslist.sort() + for res in poslist: + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + total += 1 + yield res + else: + for i in dayset[start:end]: + if i is not None: + date = datetime.date.fromordinal(ii.yearordinal + i) + for time in timeset: + res = datetime.datetime.combine(date, time) + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + + total += 1 + yield res + + # Handle frequency and interval + fixday = False + if freq == YEARLY: + year += interval + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == MONTHLY: + month += interval + if month > 12: + div, mod = divmod(month, 12) + month = mod + year += div + if month == 0: + month = 12 + year -= 1 + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == WEEKLY: + if wkst > weekday: + day += -(weekday+1+(6-wkst))+self._interval*7 + else: + day += -(weekday-wkst)+self._interval*7 + weekday = wkst + fixday = True + elif freq == DAILY: + day += interval + fixday = True + elif freq == HOURLY: + if filtered: + # Jump to one iteration before next day + hour += ((23-hour)//interval)*interval + + if byhour: + ndays, hour = self.__mod_distance(value=hour, + byxxx=self._byhour, + base=24) + else: + ndays, hour = divmod(hour+interval, 24) + + if ndays: + day += ndays + fixday = True + + timeset = gettimeset(hour, minute, second) + elif freq == MINUTELY: + if filtered: + # Jump to one iteration before next day + minute += ((1439-(hour*60+minute))//interval)*interval + + valid = False + rep_rate = (24*60) + for j in range(rep_rate // gcd(interval, rep_rate)): + if byminute: + nhours, minute = \ + self.__mod_distance(value=minute, + byxxx=self._byminute, + base=60) + else: + nhours, minute = divmod(minute+interval, 60) + + div, hour = divmod(hour+nhours, 24) + if div: + day += div + fixday = True + filtered = False + + if not byhour or hour in byhour: + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval and ' + + 'byhour resulting in empty rule.') + + timeset = gettimeset(hour, minute, second) + elif freq == SECONDLY: + if filtered: + # Jump to one iteration before next day + second += (((86399 - (hour * 3600 + minute * 60 + second)) + // interval) * interval) + + 
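+                # Advance in steps of `interval` seconds for at most one full
+                # cycle (rep_rate // gcd(interval, rep_rate) iterations); if no
+                # second satisfying BYHOUR/BYMINUTE/BYSECOND turns up, the rule
+                # is empty and the ValueError below is raised.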
rep_rate = (24 * 3600) + valid = False + for j in range(0, rep_rate // gcd(interval, rep_rate)): + if bysecond: + nminutes, second = \ + self.__mod_distance(value=second, + byxxx=self._bysecond, + base=60) + else: + nminutes, second = divmod(second+interval, 60) + + div, minute = divmod(minute+nminutes, 60) + if div: + hour += div + div, hour = divmod(hour, 24) + if div: + day += div + fixday = True + + if ((not byhour or hour in byhour) and + (not byminute or minute in byminute) and + (not bysecond or second in bysecond)): + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval, ' + + 'byhour and byminute resulting in empty' + + ' rule.') + + timeset = gettimeset(hour, minute, second) + + if fixday and day > 28: + daysinmonth = calendar.monthrange(year, month)[1] + if day > daysinmonth: + while day > daysinmonth: + day -= daysinmonth + month += 1 + if month == 13: + month = 1 + year += 1 + if year > datetime.MAXYEAR: + self._len = total + return + daysinmonth = calendar.monthrange(year, month)[1] + ii.rebuild(year, month) + + def __construct_byset(self, start, byxxx, base): + """ + If a `BYXXX` sequence is passed to the constructor at the same level as + `FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some + specifications which cannot be reached given some starting conditions. + + This occurs whenever the interval is not coprime with the base of a + given unit and the difference between the starting position and the + ending position is not coprime with the greatest common denominator + between the interval and the base. For example, with a FREQ of hourly + starting at 17:00 and an interval of 4, the only valid values for + BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not + coprime. + + :param start: + Specifies the starting position. + :param byxxx: + An iterable containing the list of allowed values. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + This does not preserve the type of the iterable, returning a set, since + the values should be unique and the order is irrelevant, this will + speed up later lookups. + + In the event of an empty set, raises a :exception:`ValueError`, as this + results in an empty rrule. + """ + + cset = set() + + # Support a single byxxx value. + if isinstance(byxxx, integer_types): + byxxx = (byxxx, ) + + for num in byxxx: + i_gcd = gcd(self._interval, base) + # Use divmod rather than % because we need to wrap negative nums. + if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0: + cset.add(num) + + if len(cset) == 0: + raise ValueError("Invalid rrule byxxx generates an empty set.") + + return cset + + def __mod_distance(self, value, byxxx, base): + """ + Calculates the next value in a sequence where the `FREQ` parameter is + specified along with a `BYXXX` parameter at the same "level" + (e.g. `HOURLY` specified with `BYHOUR`). + + :param value: + The old value of the component. + :param byxxx: + The `BYXXX` set, which should have been generated by + `rrule._construct_byset`, or something else which checks that a + valid rule is present. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + If a valid value is not found after `base` iterations (the maximum + number before the sequence would start to repeat), this raises a + :exception:`ValueError`, as no valid values were found. 
+ + This returns a tuple of `divmod(n*interval, base)`, where `n` is the + smallest number of `interval` repetitions until the next specified + value in `byxxx` is found. + """ + accumulator = 0 + for ii in range(1, base + 1): + # Using divmod() over % to account for negative intervals + div, value = divmod(value + self._interval, base) + accumulator += div + if value in byxxx: + return (accumulator, value) + + +class _iterinfo(object): + __slots__ = ["rrule", "lastyear", "lastmonth", + "yearlen", "nextyearlen", "yearordinal", "yearweekday", + "mmask", "mrange", "mdaymask", "nmdaymask", + "wdaymask", "wnomask", "nwdaymask", "eastermask"] + + def __init__(self, rrule): + for attr in self.__slots__: + setattr(self, attr, None) + self.rrule = rrule + + def rebuild(self, year, month): + # Every mask is 7 days longer to handle cross-year weekly periods. + rr = self.rrule + if year != self.lastyear: + self.yearlen = 365 + calendar.isleap(year) + self.nextyearlen = 365 + calendar.isleap(year + 1) + firstyday = datetime.date(year, 1, 1) + self.yearordinal = firstyday.toordinal() + self.yearweekday = firstyday.weekday() + + wday = datetime.date(year, 1, 1).weekday() + if self.yearlen == 365: + self.mmask = M365MASK + self.mdaymask = MDAY365MASK + self.nmdaymask = NMDAY365MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M365RANGE + else: + self.mmask = M366MASK + self.mdaymask = MDAY366MASK + self.nmdaymask = NMDAY366MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M366RANGE + + if not rr._byweekno: + self.wnomask = None + else: + self.wnomask = [0]*(self.yearlen+7) + # no1wkst = firstwkst = self.wdaymask.index(rr._wkst) + no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7 + if no1wkst >= 4: + no1wkst = 0 + # Number of days in the year, plus the days we got + # from last year. + wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7 + else: + # Number of days in the year, minus the days we + # left in last year. + wyearlen = self.yearlen-no1wkst + div, mod = divmod(wyearlen, 7) + numweeks = div+mod//4 + for n in rr._byweekno: + if n < 0: + n += numweeks+1 + if not (0 < n <= numweeks): + continue + if n > 1: + i = no1wkst+(n-1)*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + else: + i = no1wkst + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if 1 in rr._byweekno: + # Check week number 1 of next year as well + # TODO: Check -numweeks for next year. + i = no1wkst+numweeks*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + if i < self.yearlen: + # If week starts in next year, we + # don't care about it. + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if no1wkst: + # Check last week number of last year as + # well. If no1wkst is 0, either the year + # started on week start, or week number 1 + # got days from last year, so there are no + # days from last year's last week number in + # this year. 
+ if -1 not in rr._byweekno: + lyearweekday = datetime.date(year-1, 1, 1).weekday() + lno1wkst = (7-lyearweekday+rr._wkst) % 7 + lyearlen = 365+calendar.isleap(year-1) + if lno1wkst >= 4: + lno1wkst = 0 + lnumweeks = 52+(lyearlen + + (lyearweekday-rr._wkst) % 7) % 7//4 + else: + lnumweeks = 52+(self.yearlen-no1wkst) % 7//4 + else: + lnumweeks = -1 + if lnumweeks in rr._byweekno: + for i in range(no1wkst): + self.wnomask[i] = 1 + + if (rr._bynweekday and (month != self.lastmonth or + year != self.lastyear)): + ranges = [] + if rr._freq == YEARLY: + if rr._bymonth: + for month in rr._bymonth: + ranges.append(self.mrange[month-1:month+1]) + else: + ranges = [(0, self.yearlen)] + elif rr._freq == MONTHLY: + ranges = [self.mrange[month-1:month+1]] + if ranges: + # Weekly frequency won't get here, so we may not + # care about cross-year weekly periods. + self.nwdaymask = [0]*self.yearlen + for first, last in ranges: + last -= 1 + for wday, n in rr._bynweekday: + if n < 0: + i = last+(n+1)*7 + i -= (self.wdaymask[i]-wday) % 7 + else: + i = first+(n-1)*7 + i += (7-self.wdaymask[i]+wday) % 7 + if first <= i <= last: + self.nwdaymask[i] = 1 + + if rr._byeaster: + self.eastermask = [0]*(self.yearlen+7) + eyday = easter.easter(year).toordinal()-self.yearordinal + for offset in rr._byeaster: + self.eastermask[eyday+offset] = 1 + + self.lastyear = year + self.lastmonth = month + + def ydayset(self, year, month, day): + return list(range(self.yearlen)), 0, self.yearlen + + def mdayset(self, year, month, day): + dset = [None]*self.yearlen + start, end = self.mrange[month-1:month+1] + for i in range(start, end): + dset[i] = i + return dset, start, end + + def wdayset(self, year, month, day): + # We need to handle cross-year weeks here. + dset = [None]*(self.yearlen+7) + i = datetime.date(year, month, day).toordinal()-self.yearordinal + start = i + for j in range(7): + dset[i] = i + i += 1 + # if (not (0 <= i < self.yearlen) or + # self.wdaymask[i] == self.rrule._wkst): + # This will cross the year boundary, if necessary. + if self.wdaymask[i] == self.rrule._wkst: + break + return dset, start, i + + def ddayset(self, year, month, day): + dset = [None] * self.yearlen + i = datetime.date(year, month, day).toordinal() - self.yearordinal + dset[i] = i + return dset, i, i + 1 + + def htimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for minute in rr._byminute: + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, + tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def mtimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def stimeset(self, hour, minute, second): + return (datetime.time(hour, minute, second, + tzinfo=self.rrule._tzinfo),) + + +class rruleset(rrulebase): + """ The rruleset type allows more complex recurrence setups, mixing + multiple rules, dates, exclusion rules, and exclusion dates. The type + constructor takes the following keyword arguments: + + :param cache: If True, caching of results will be enabled, improving + performance of multiple queries considerably. 
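+
+        For example (an illustrative sketch):
+
+            >>> from dateutil.rrule import rruleset, rrule, DAILY
+            >>> from datetime import datetime
+            >>> rset = rruleset()
+            >>> rset.rrule(rrule(DAILY, count=3, dtstart=datetime(2020, 1, 1)))
+            >>> rset.exdate(datetime(2020, 1, 2))
+            >>> list(rset)
+            [datetime.datetime(2020, 1, 1, 0, 0), datetime.datetime(2020, 1, 3, 0, 0)]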
""" + + class _genitem(object): + def __init__(self, genlist, gen): + try: + self.dt = advance_iterator(gen) + genlist.append(self) + except StopIteration: + pass + self.genlist = genlist + self.gen = gen + + def __next__(self): + try: + self.dt = advance_iterator(self.gen) + except StopIteration: + if self.genlist[0] is self: + heapq.heappop(self.genlist) + else: + self.genlist.remove(self) + heapq.heapify(self.genlist) + + next = __next__ + + def __lt__(self, other): + return self.dt < other.dt + + def __gt__(self, other): + return self.dt > other.dt + + def __eq__(self, other): + return self.dt == other.dt + + def __ne__(self, other): + return self.dt != other.dt + + def __init__(self, cache=False): + super(rruleset, self).__init__(cache) + self._rrule = [] + self._rdate = [] + self._exrule = [] + self._exdate = [] + + @_invalidates_cache + def rrule(self, rrule): + """ Include the given :py:class:`rrule` instance in the recurrence set + generation. """ + self._rrule.append(rrule) + + @_invalidates_cache + def rdate(self, rdate): + """ Include the given :py:class:`datetime` instance in the recurrence + set generation. """ + self._rdate.append(rdate) + + @_invalidates_cache + def exrule(self, exrule): + """ Include the given rrule instance in the recurrence set exclusion + list. Dates which are part of the given recurrence rules will not + be generated, even if some inclusive rrule or rdate matches them. + """ + self._exrule.append(exrule) + + @_invalidates_cache + def exdate(self, exdate): + """ Include the given datetime instance in the recurrence set + exclusion list. Dates included that way will not be generated, + even if some inclusive rrule or rdate matches them. """ + self._exdate.append(exdate) + + def _iter(self): + rlist = [] + self._rdate.sort() + self._genitem(rlist, iter(self._rdate)) + for gen in [iter(x) for x in self._rrule]: + self._genitem(rlist, gen) + exlist = [] + self._exdate.sort() + self._genitem(exlist, iter(self._exdate)) + for gen in [iter(x) for x in self._exrule]: + self._genitem(exlist, gen) + lastdt = None + total = 0 + heapq.heapify(rlist) + heapq.heapify(exlist) + while rlist: + ritem = rlist[0] + if not lastdt or lastdt != ritem.dt: + while exlist and exlist[0] < ritem: + exitem = exlist[0] + advance_iterator(exitem) + if exlist and exlist[0] is exitem: + heapq.heapreplace(exlist, exitem) + if not exlist or ritem != exlist[0]: + total += 1 + yield ritem.dt + lastdt = ritem.dt + advance_iterator(ritem) + if rlist and rlist[0] is ritem: + heapq.heapreplace(rlist, ritem) + self._len = total + + + + +class _rrulestr(object): + """ Parses a string representation of a recurrence rule or set of + recurrence rules. + + :param s: + Required, a string defining one or more recurrence rules. + + :param dtstart: + If given, used as the default recurrence start if not specified in the + rule string. + + :param cache: + If set ``True`` caching of results will be enabled, improving + performance of multiple queries considerably. + + :param unfold: + If set ``True`` indicates that a rule string is split over more + than one line and should be joined before processing. + + :param forceset: + If set ``True`` forces a :class:`dateutil.rrule.rruleset` to + be returned. + + :param compatible: + If set ``True`` forces ``unfold`` and ``forceset`` to be ``True``. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime.datetime` object is returned. 
+ + :param tzids: + If given, a callable or mapping used to retrieve a + :class:`datetime.tzinfo` from a string representation. + Defaults to :func:`dateutil.tz.gettz`. + + :param tzinfos: + Additional time zone names / aliases which may be present in a string + representation. See :func:`dateutil.parser.parse` for more + information. + + :return: + Returns a :class:`dateutil.rrule.rruleset` or + :class:`dateutil.rrule.rrule` + """ + + _freq_map = {"YEARLY": YEARLY, + "MONTHLY": MONTHLY, + "WEEKLY": WEEKLY, + "DAILY": DAILY, + "HOURLY": HOURLY, + "MINUTELY": MINUTELY, + "SECONDLY": SECONDLY} + + _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3, + "FR": 4, "SA": 5, "SU": 6} + + def _handle_int(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = int(value) + + def _handle_int_list(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = [int(x) for x in value.split(',')] + + _handle_INTERVAL = _handle_int + _handle_COUNT = _handle_int + _handle_BYSETPOS = _handle_int_list + _handle_BYMONTH = _handle_int_list + _handle_BYMONTHDAY = _handle_int_list + _handle_BYYEARDAY = _handle_int_list + _handle_BYEASTER = _handle_int_list + _handle_BYWEEKNO = _handle_int_list + _handle_BYHOUR = _handle_int_list + _handle_BYMINUTE = _handle_int_list + _handle_BYSECOND = _handle_int_list + + def _handle_FREQ(self, rrkwargs, name, value, **kwargs): + rrkwargs["freq"] = self._freq_map[value] + + def _handle_UNTIL(self, rrkwargs, name, value, **kwargs): + global parser + if not parser: + from dateutil import parser + try: + rrkwargs["until"] = parser.parse(value, + ignoretz=kwargs.get("ignoretz"), + tzinfos=kwargs.get("tzinfos")) + except ValueError: + raise ValueError("invalid until date") + + def _handle_WKST(self, rrkwargs, name, value, **kwargs): + rrkwargs["wkst"] = self._weekday_map[value] + + def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs): + """ + Two ways to specify this: +1MO or MO(+1) + """ + l = [] + for wday in value.split(','): + if '(' in wday: + # If it's of the form TH(+1), etc. + splt = wday.split('(') + w = splt[0] + n = int(splt[1][:-1]) + elif len(wday): + # If it's of the form +1MO + for i in range(len(wday)): + if wday[i] not in '+-0123456789': + break + n = wday[:i] or None + w = wday[i:] + if n: + n = int(n) + else: + raise ValueError("Invalid (empty) BYDAY specification.") + + l.append(weekdays[self._weekday_map[w]](n)) + rrkwargs["byweekday"] = l + + _handle_BYDAY = _handle_BYWEEKDAY + + def _parse_rfc_rrule(self, line, + dtstart=None, + cache=False, + ignoretz=False, + tzinfos=None): + if line.find(':') != -1: + name, value = line.split(':') + if name != "RRULE": + raise ValueError("unknown parameter name") + else: + value = line + rrkwargs = {} + for pair in value.split(';'): + name, value = pair.split('=') + name = name.upper() + value = value.upper() + try: + getattr(self, "_handle_"+name)(rrkwargs, name, value, + ignoretz=ignoretz, + tzinfos=tzinfos) + except AttributeError: + raise ValueError("unknown parameter '%s'" % name) + except (KeyError, ValueError): + raise ValueError("invalid '%s': %s" % (name, value)) + return rrule(dtstart=dtstart, cache=cache, **rrkwargs) + + def _parse_date_value(self, date_value, parms, rule_tzids, + ignoretz, tzids, tzinfos): + global parser + if not parser: + from dateutil import parser + + datevals = [] + value_found = False + TZID = None + + for parm in parms: + if parm.startswith("TZID="): + try: + tzkey = rule_tzids[parm.split('TZID=')[-1]] + except KeyError: + continue + if tzids is None: + from . 
import tz + tzlookup = tz.gettz + elif callable(tzids): + tzlookup = tzids + else: + tzlookup = getattr(tzids, 'get', None) + if tzlookup is None: + msg = ('tzids must be a callable, mapping, or None, ' + 'not %s' % tzids) + raise ValueError(msg) + + TZID = tzlookup(tzkey) + continue + + # RFC 5445 3.8.2.4: The VALUE parameter is optional, but may be found + # only once. + if parm not in {"VALUE=DATE-TIME", "VALUE=DATE"}: + raise ValueError("unsupported parm: " + parm) + else: + if value_found: + msg = ("Duplicate value parameter found in: " + parm) + raise ValueError(msg) + value_found = True + + for datestr in date_value.split(','): + date = parser.parse(datestr, ignoretz=ignoretz, tzinfos=tzinfos) + if TZID is not None: + if date.tzinfo is None: + date = date.replace(tzinfo=TZID) + else: + raise ValueError('DTSTART/EXDATE specifies multiple timezone') + datevals.append(date) + + return datevals + + def _parse_rfc(self, s, + dtstart=None, + cache=False, + unfold=False, + forceset=False, + compatible=False, + ignoretz=False, + tzids=None, + tzinfos=None): + global parser + if compatible: + forceset = True + unfold = True + + TZID_NAMES = dict(map( + lambda x: (x.upper(), x), + re.findall('TZID=(?P[^:]+):', s) + )) + s = s.upper() + if not s.strip(): + raise ValueError("empty string") + if unfold: + lines = s.splitlines() + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + else: + lines = s.split() + if (not forceset and len(lines) == 1 and (s.find(':') == -1 or + s.startswith('RRULE:'))): + return self._parse_rfc_rrule(lines[0], cache=cache, + dtstart=dtstart, ignoretz=ignoretz, + tzinfos=tzinfos) + else: + rrulevals = [] + rdatevals = [] + exrulevals = [] + exdatevals = [] + for line in lines: + if not line: + continue + if line.find(':') == -1: + name = "RRULE" + value = line + else: + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0] + parms = parms[1:] + if name == "RRULE": + for parm in parms: + raise ValueError("unsupported RRULE parm: "+parm) + rrulevals.append(value) + elif name == "RDATE": + for parm in parms: + if parm != "VALUE=DATE-TIME": + raise ValueError("unsupported RDATE parm: "+parm) + rdatevals.append(value) + elif name == "EXRULE": + for parm in parms: + raise ValueError("unsupported EXRULE parm: "+parm) + exrulevals.append(value) + elif name == "EXDATE": + exdatevals.extend( + self._parse_date_value(value, parms, + TZID_NAMES, ignoretz, + tzids, tzinfos) + ) + elif name == "DTSTART": + dtvals = self._parse_date_value(value, parms, TZID_NAMES, + ignoretz, tzids, tzinfos) + if len(dtvals) != 1: + raise ValueError("Multiple DTSTART values specified:" + + value) + dtstart = dtvals[0] + else: + raise ValueError("unsupported property: "+name) + if (forceset or len(rrulevals) > 1 or rdatevals + or exrulevals or exdatevals): + if not parser and (rdatevals or exdatevals): + from dateutil import parser + rset = rruleset(cache=cache) + for value in rrulevals: + rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in rdatevals: + for datestr in value.split(','): + rset.rdate(parser.parse(datestr, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exrulevals: + rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exdatevals: + rset.exdate(value) + if 
compatible and dtstart: + rset.rdate(dtstart) + return rset + else: + return self._parse_rfc_rrule(rrulevals[0], + dtstart=dtstart, + cache=cache, + ignoretz=ignoretz, + tzinfos=tzinfos) + + def __call__(self, s, **kwargs): + return self._parse_rfc(s, **kwargs) + + +rrulestr = _rrulestr() + +# vim:ts=4:sw=4:et diff --git a/py3/lib/python3.6/site-packages/dateutil/tz/__init__.py b/py3/lib/python3.6/site-packages/dateutil/tz/__init__.py new file mode 100644 index 0000000..5a2d9cd --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/tz/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +from .tz import * +from .tz import __doc__ + +#: Convenience constant providing a :class:`tzutc()` instance +#: +#: .. versionadded:: 2.7.0 +UTC = tzutc() + +__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", + "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz", + "enfold", "datetime_ambiguous", "datetime_exists", + "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"] + + +class DeprecatedTzFormatWarning(Warning): + """Warning raised when time zones are parsed from deprecated formats.""" diff --git a/py3/lib/python3.6/site-packages/dateutil/tz/_common.py b/py3/lib/python3.6/site-packages/dateutil/tz/_common.py new file mode 100644 index 0000000..594e082 --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/tz/_common.py @@ -0,0 +1,419 @@ +from six import PY2 + +from functools import wraps + +from datetime import datetime, timedelta, tzinfo + + +ZERO = timedelta(0) + +__all__ = ['tzname_in_python2', 'enfold'] + + +def tzname_in_python2(namefunc): + """Change unicode output into bytestrings in Python 2 + + tzname() API changed in Python 3. It used to return bytes, but was changed + to unicode strings + """ + if PY2: + @wraps(namefunc) + def adjust_encoding(*args, **kwargs): + name = namefunc(*args, **kwargs) + if name is not None: + name = name.encode() + + return name + + return adjust_encoding + else: + return namefunc + + +# The following is adapted from Alexander Belopolsky's tz library +# https://github.com/abalkin/tz +if hasattr(datetime, 'fold'): + # This is the pre-python 3.6 fold situation + def enfold(dt, fold=1): + """ + Provides a unified interface for assigning the ``fold`` attribute to + datetimes both before and after the implementation of PEP-495. + + :param fold: + The value for the ``fold`` attribute in the returned datetime. This + should be either 0 or 1. + + :return: + Returns an object for which ``getattr(dt, 'fold', 0)`` returns + ``fold`` for all versions of Python. In versions prior to + Python 3.6, this is a ``_DatetimeWithFold`` object, which is a + subclass of :py:class:`datetime.datetime` with the ``fold`` + attribute added, if ``fold`` is 1. + + .. versionadded:: 2.6.0 + """ + return dt.replace(fold=fold) + +else: + class _DatetimeWithFold(datetime): + """ + This is a class designed to provide a PEP 495-compliant interface for + Python versions before 3.6. It is used only for dates in a fold, so + the ``fold`` attribute is fixed at ``1``. + + .. versionadded:: 2.6.0 + """ + __slots__ = () + + def replace(self, *args, **kwargs): + """ + Return a datetime with the same attributes, except for those + attributes given new values by whichever keyword arguments are + specified. Note that tzinfo=None can be specified to create a naive + datetime from an aware datetime with no conversion of date and time + data. + + This is reimplemented in ``_DatetimeWithFold`` because pypy3 will + return a ``datetime.datetime`` even if ``fold`` is unchanged. 
+ """ + argnames = ( + 'year', 'month', 'day', 'hour', 'minute', 'second', + 'microsecond', 'tzinfo' + ) + + for arg, argname in zip(args, argnames): + if argname in kwargs: + raise TypeError('Duplicate argument: {}'.format(argname)) + + kwargs[argname] = arg + + for argname in argnames: + if argname not in kwargs: + kwargs[argname] = getattr(self, argname) + + dt_class = self.__class__ if kwargs.get('fold', 1) else datetime + + return dt_class(**kwargs) + + @property + def fold(self): + return 1 + + def enfold(dt, fold=1): + """ + Provides a unified interface for assigning the ``fold`` attribute to + datetimes both before and after the implementation of PEP-495. + + :param fold: + The value for the ``fold`` attribute in the returned datetime. This + should be either 0 or 1. + + :return: + Returns an object for which ``getattr(dt, 'fold', 0)`` returns + ``fold`` for all versions of Python. In versions prior to + Python 3.6, this is a ``_DatetimeWithFold`` object, which is a + subclass of :py:class:`datetime.datetime` with the ``fold`` + attribute added, if ``fold`` is 1. + + .. versionadded:: 2.6.0 + """ + if getattr(dt, 'fold', 0) == fold: + return dt + + args = dt.timetuple()[:6] + args += (dt.microsecond, dt.tzinfo) + + if fold: + return _DatetimeWithFold(*args) + else: + return datetime(*args) + + +def _validate_fromutc_inputs(f): + """ + The CPython version of ``fromutc`` checks that the input is a ``datetime`` + object and that ``self`` is attached as its ``tzinfo``. + """ + @wraps(f) + def fromutc(self, dt): + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + return f(self, dt) + + return fromutc + + +class _tzinfo(tzinfo): + """ + Base class for all ``dateutil`` ``tzinfo`` objects. + """ + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + + dt = dt.replace(tzinfo=self) + + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None) + + return same_dt and not same_offset + + def _fold_status(self, dt_utc, dt_wall): + """ + Determine the fold status of a "wall" datetime, given a representation + of the same datetime as a (naive) UTC datetime. This is calculated based + on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all + datetimes, and that this offset is the actual number of hours separating + ``dt_utc`` and ``dt_wall``. + + :param dt_utc: + Representation of the datetime as UTC + + :param dt_wall: + Representation of the datetime as "wall time". This parameter must + either have a `fold` attribute or have a fold-naive + :class:`datetime.tzinfo` attached, otherwise the calculation may + fail. + """ + if self.is_ambiguous(dt_wall): + delta_wall = dt_wall - dt_utc + _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst())) + else: + _fold = 0 + + return _fold + + def _fold(self, dt): + return getattr(dt, 'fold', 0) + + def _fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. 
+ + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurence, chronologically, of the ambiguous datetime). + + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + + # Re-implement the algorithm from Python's datetime.py + dtoff = dt.utcoffset() + if dtoff is None: + raise ValueError("fromutc() requires a non-None utcoffset() " + "result") + + # The original datetime.py code assumes that `dst()` defaults to + # zero during ambiguous times. PEP 495 inverts this presumption, so + # for pre-PEP 495 versions of python, we need to tweak the algorithm. + dtdst = dt.dst() + if dtdst is None: + raise ValueError("fromutc() requires a non-None dst() result") + delta = dtoff - dtdst + + dt += delta + # Set fold=1 so we can default to being in the fold for + # ambiguous dates. + dtdst = enfold(dt, fold=1).dst() + if dtdst is None: + raise ValueError("fromutc(): dt.dst gave inconsistent " + "results; cannot convert") + return dt + dtdst + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurance, chronologically, of the ambiguous datetime). + + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + dt_wall = self._fromutc(dt) + + # Calculate the fold status given the two datetimes. + _fold = self._fold_status(dt, dt_wall) + + # Set the default fold value for ambiguous dates + return enfold(dt_wall, fold=_fold) + + +class tzrangebase(_tzinfo): + """ + This is an abstract base class for time zones represented by an annual + transition into and out of DST. Child classes should implement the following + methods: + + * ``__init__(self, *args, **kwargs)`` + * ``transitions(self, year)`` - this is expected to return a tuple of + datetimes representing the DST on and off transitions in standard + time. + + A fully initialized ``tzrangebase`` subclass should also provide the + following attributes: + * ``hasdst``: Boolean whether or not the zone uses DST. + * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects + representing the respective UTC offsets. + * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short + abbreviations in DST and STD, respectively. + * ``_hasdst``: Whether or not the zone has DST. + + .. 
versionadded:: 2.6.0 + """ + def __init__(self): + raise NotImplementedError('tzrangebase is an abstract base class') + + def utcoffset(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_base_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + if self._isdst(dt): + return self._dst_abbr + else: + return self._std_abbr + + def fromutc(self, dt): + """ Given a datetime in UTC, return local time """ + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # Get transitions - if there are none, fixed offset + transitions = self.transitions(dt.year) + if transitions is None: + return dt + self.utcoffset(dt) + + # Get the transition times in UTC + dston, dstoff = transitions + + dston -= self._std_offset + dstoff -= self._std_offset + + utc_transitions = (dston, dstoff) + dt_utc = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt_utc, utc_transitions) + + if isdst: + dt_wall = dt + self._dst_offset + else: + dt_wall = dt + self._std_offset + + _fold = int(not isdst and self.is_ambiguous(dt_wall)) + + return enfold(dt_wall, fold=_fold) + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + if not self.hasdst: + return False + + start, end = self.transitions(dt.year) + + dt = dt.replace(tzinfo=None) + return (end <= dt < end + self._dst_base_offset) + + def _isdst(self, dt): + if not self.hasdst: + return False + elif dt is None: + return None + + transitions = self.transitions(dt.year) + + if transitions is None: + return False + + dt = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt, transitions) + + # Handle ambiguous dates + if not isdst and self.is_ambiguous(dt): + return not self._fold(dt) + else: + return isdst + + def _naive_isdst(self, dt, transitions): + dston, dstoff = transitions + + dt = dt.replace(tzinfo=None) + + if dston < dstoff: + isdst = dston <= dt < dstoff + else: + isdst = not dstoff <= dt < dston + + return isdst + + @property + def _dst_base_offset(self): + return self._dst_offset - self._std_offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(...)" % self.__class__.__name__ + + __reduce__ = object.__reduce__ diff --git a/py3/lib/python3.6/site-packages/dateutil/tz/_factories.py b/py3/lib/python3.6/site-packages/dateutil/tz/_factories.py new file mode 100644 index 0000000..d2560eb --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/tz/_factories.py @@ -0,0 +1,73 @@ +from datetime import timedelta +import weakref +from collections import OrderedDict + + +class _TzSingleton(type): + def __init__(cls, *args, **kwargs): + cls.__instance = None + super(_TzSingleton, cls).__init__(*args, **kwargs) + + def __call__(cls): + if cls.__instance is None: + cls.__instance = super(_TzSingleton, cls).__call__() + return cls.__instance + + +class _TzFactory(type): + def instance(cls, *args, **kwargs): + """Alternate constructor that returns a fresh instance""" + return type.__call__(cls, *args, **kwargs) + 
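+
+# Both factories below deduplicate tz instances: equal constructor arguments
+# map to the same object through a WeakValueDictionary, while a small
+# OrderedDict "strong cache" (8 entries) keeps the most recently used
+# instances alive so they are not collected between lookups.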
+ +class _TzOffsetFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + def __call__(cls, name, offset): + if isinstance(offset, timedelta): + key = (name, offset.total_seconds()) + else: + key = (name, offset) + + instance = cls.__instances.get(key, None) + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(name, offset)) + + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + # TODO: Maybe this should be under a lock? + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + + +class _TzStrFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + def __call__(cls, s, posix_offset=False): + key = (s, posix_offset) + instance = cls.__instances.get(key, None) + + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(s, posix_offset)) + + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + + # Remove an item if the strong cache is overpopulated + # TODO: Maybe this should be under a lock? + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + diff --git a/py3/lib/python3.6/site-packages/dateutil/tz/tz.py b/py3/lib/python3.6/site-packages/dateutil/tz/tz.py new file mode 100644 index 0000000..d05414e --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/tz/tz.py @@ -0,0 +1,1836 @@ +# -*- coding: utf-8 -*- +""" +This module offers timezone implementations subclassing the abstract +:py:class:`datetime.tzinfo` type. There are classes to handle tzfile format +files (usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, +etc), TZ environment string (in all known formats), given ranges (with help +from relative deltas), local machine timezone, fixed offset timezone, and UTC +timezone. +""" +import datetime +import struct +import time +import sys +import os +import bisect +import weakref +from collections import OrderedDict + +import six +from six import string_types +from six.moves import _thread +from ._common import tzname_in_python2, _tzinfo +from ._common import tzrangebase, enfold +from ._common import _validate_fromutc_inputs + +from ._factories import _TzSingleton, _TzOffsetFactory +from ._factories import _TzStrFactory +try: + from .win import tzwin, tzwinlocal +except ImportError: + tzwin = tzwinlocal = None + +# For warning about rounding tzinfo +from warnings import warn + +ZERO = datetime.timedelta(0) +EPOCH = datetime.datetime.utcfromtimestamp(0) +EPOCHORDINAL = EPOCH.toordinal() + + +@six.add_metaclass(_TzSingleton) +class tzutc(datetime.tzinfo): + """ + This is a tzinfo object that represents the UTC time zone. + + **Examples:** + + .. doctest:: + + >>> from datetime import * + >>> from dateutil.tz import * + + >>> datetime.now() + datetime.datetime(2003, 9, 27, 9, 40, 1, 521290) + + >>> datetime.now(tzutc()) + datetime.datetime(2003, 9, 27, 12, 40, 12, 156379, tzinfo=tzutc()) + + >>> datetime.now(tzutc()).tzname() + 'UTC' + + .. versionchanged:: 2.7.0 + ``tzutc()`` is now a singleton, so the result of ``tzutc()`` will + always return the same object. + + .. 
doctest:: + + >>> from dateutil.tz import tzutc, UTC + >>> tzutc() is tzutc() + True + >>> tzutc() is UTC + True + """ + def utcoffset(self, dt): + return ZERO + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return "UTC" + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Fast track version of fromutc() returns the original ``dt`` object for + any valid :py:class:`datetime.datetime` object. + """ + return dt + + def __eq__(self, other): + if not isinstance(other, (tzutc, tzoffset)): + return NotImplemented + + return (isinstance(other, tzutc) or + (isinstance(other, tzoffset) and other._offset == ZERO)) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +@six.add_metaclass(_TzOffsetFactory) +class tzoffset(datetime.tzinfo): + """ + A simple class for representing a fixed offset from UTC. + + :param name: + The timezone name, to be returned when ``tzname()`` is called. + :param offset: + The time zone offset in seconds, or (since version 2.6.0, represented + as a :py:class:`datetime.timedelta` object). + """ + def __init__(self, name, offset): + self._name = name + + try: + # Allow a timedelta + offset = offset.total_seconds() + except (TypeError, AttributeError): + pass + + self._offset = datetime.timedelta(seconds=_get_supported_offset(offset)) + + def utcoffset(self, dt): + return self._offset + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._name + + @_validate_fromutc_inputs + def fromutc(self, dt): + return dt + self._offset + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + def __eq__(self, other): + if not isinstance(other, tzoffset): + return NotImplemented + + return self._offset == other._offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s, %s)" % (self.__class__.__name__, + repr(self._name), + int(self._offset.total_seconds())) + + __reduce__ = object.__reduce__ + + +class tzlocal(_tzinfo): + """ + A :class:`tzinfo` subclass built around the ``time`` timezone functions. 
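A small usage sketch for the two fixed-offset classes above (and ``tzlocal``, defined next); the zone name "IST" here is only illustrative:

    from datetime import datetime, timedelta
    from dateutil import tz

    ist = tz.tzoffset("IST", timedelta(hours=5, minutes=30))
    dt = datetime(2020, 1, 1, 12, 0, tzinfo=ist)

    print(dt.utcoffset())          # 5:30:00
    print(dt.tzname())             # IST
    print(dt.astimezone(tz.UTC))   # 2020-01-01 06:30:00+00:00

    # tzlocal() wraps the time module, so its offset depends on the host machine
    print(datetime.now(tz.tzlocal()).tzname())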
+ """ + def __init__(self): + super(tzlocal, self).__init__() + + self._std_offset = datetime.timedelta(seconds=-time.timezone) + if time.daylight: + self._dst_offset = datetime.timedelta(seconds=-time.altzone) + else: + self._dst_offset = self._std_offset + + self._dst_saved = self._dst_offset - self._std_offset + self._hasdst = bool(self._dst_saved) + self._tznames = tuple(time.tzname) + + def utcoffset(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset - self._std_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._tznames[self._isdst(dt)] + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + naive_dst = self._naive_is_dst(dt) + return (not naive_dst and + (naive_dst != self._naive_is_dst(dt - self._dst_saved))) + + def _naive_is_dst(self, dt): + timestamp = _datetime_to_timestamp(dt) + return time.localtime(timestamp + time.timezone).tm_isdst + + def _isdst(self, dt, fold_naive=True): + # We can't use mktime here. It is unstable when deciding if + # the hour near to a change is DST or not. + # + # timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour, + # dt.minute, dt.second, dt.weekday(), 0, -1)) + # return time.localtime(timestamp).tm_isdst + # + # The code above yields the following result: + # + # >>> import tz, datetime + # >>> t = tz.tzlocal() + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # + # Here is a more stable implementation: + # + if not self._hasdst: + return False + + # Check for ambiguous times: + dstval = self._naive_is_dst(dt) + fold = getattr(dt, 'fold', None) + + if self.is_ambiguous(dt): + if fold is not None: + return not self._fold(dt) + else: + return True + + return dstval + + def __eq__(self, other): + if isinstance(other, tzlocal): + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset) + elif isinstance(other, tzutc): + return (not self._hasdst and + self._tznames[0] in {'UTC', 'GMT'} and + self._std_offset == ZERO) + elif isinstance(other, tzoffset): + return (not self._hasdst and + self._tznames[0] == other._name and + self._std_offset == other._offset) + else: + return NotImplemented + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +class _ttinfo(object): + __slots__ = ["offset", "delta", "isdst", "abbr", + "isstd", "isgmt", "dstoffset"] + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def __repr__(self): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (self.__class__.__name__, ", ".join(l)) + + def __eq__(self, other): + if not isinstance(other, _ttinfo): 
+ return NotImplemented + + return (self.offset == other.offset and + self.delta == other.delta and + self.isdst == other.isdst and + self.abbr == other.abbr and + self.isstd == other.isstd and + self.isgmt == other.isgmt and + self.dstoffset == other.dstoffset) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __getstate__(self): + state = {} + for name in self.__slots__: + state[name] = getattr(self, name, None) + return state + + def __setstate__(self, state): + for name in self.__slots__: + if name in state: + setattr(self, name, state[name]) + + +class _tzfile(object): + """ + Lightweight class for holding the relevant transition and time zone + information read from binary tzfiles. + """ + attrs = ['trans_list', 'trans_list_utc', 'trans_idx', 'ttinfo_list', + 'ttinfo_std', 'ttinfo_dst', 'ttinfo_before', 'ttinfo_first'] + + def __init__(self, **kwargs): + for attr in self.attrs: + setattr(self, attr, kwargs.get(attr, None)) + + +class tzfile(_tzinfo): + """ + This is a ``tzinfo`` subclass thant allows one to use the ``tzfile(5)`` + format timezone files to extract current and historical zone information. + + :param fileobj: + This can be an opened file stream or a file name that the time zone + information can be read from. + + :param filename: + This is an optional parameter specifying the source of the time zone + information in the event that ``fileobj`` is a file object. If omitted + and ``fileobj`` is a file stream, this parameter will be set either to + ``fileobj``'s ``name`` attribute or to ``repr(fileobj)``. + + See `Sources for Time Zone and Daylight Saving Time Data + `_ for more information. + Time zone files can be compiled from the `IANA Time Zone database files + `_ with the `zic time zone compiler + `_ + + .. note:: + + Only construct a ``tzfile`` directly if you have a specific timezone + file on disk that you want to read into a Python ``tzinfo`` object. + If you want to get a ``tzfile`` representing a specific IANA zone, + (e.g. ``'America/New_York'``), you should call + :func:`dateutil.tz.gettz` with the zone identifier. + + + **Examples:** + + Using the US Eastern time zone as an example, we can see that a ``tzfile`` + provides time zone information for the standard Daylight Saving offsets: + + .. testsetup:: tzfile + + from dateutil.tz import gettz + from datetime import datetime + + .. doctest:: tzfile + + >>> NYC = gettz('America/New_York') + >>> NYC + tzfile('/usr/share/zoneinfo/America/New_York') + + >>> print(datetime(2016, 1, 3, tzinfo=NYC)) # EST + 2016-01-03 00:00:00-05:00 + + >>> print(datetime(2016, 7, 7, tzinfo=NYC)) # EDT + 2016-07-07 00:00:00-04:00 + + + The ``tzfile`` structure contains a fully history of the time zone, + so historical dates will also have the right offsets. For example, before + the adoption of the UTC standards, New York used local solar mean time: + + .. doctest:: tzfile + + >>> print(datetime(1901, 4, 12, tzinfo=NYC)) # LMT + 1901-04-12 00:00:00-04:56 + + And during World War II, New York was on "Eastern War Time", which was a + state of permanent daylight saving time: + + .. 
doctest:: tzfile + + >>> print(datetime(1944, 2, 7, tzinfo=NYC)) # EWT + 1944-02-07 00:00:00-04:00 + + """ + + def __init__(self, fileobj, filename=None): + super(tzfile, self).__init__() + + file_opened_here = False + if isinstance(fileobj, string_types): + self._filename = fileobj + fileobj = open(fileobj, 'rb') + file_opened_here = True + elif filename is not None: + self._filename = filename + elif hasattr(fileobj, "name"): + self._filename = fileobj.name + else: + self._filename = repr(fileobj) + + if fileobj is not None: + if not file_opened_here: + fileobj = _nullcontext(fileobj) + + with fileobj as file_stream: + tzobj = self._read_tzfile(file_stream) + + self._set_tzdata(tzobj) + + def _set_tzdata(self, tzobj): + """ Set the time zone data of this object from a _tzfile object """ + # Copy the relevant attributes over as private attributes + for attr in _tzfile.attrs: + setattr(self, '_' + attr, getattr(tzobj, attr)) + + def _read_tzfile(self, fileobj): + out = _tzfile() + + # From tzfile(5): + # + # The time zone information files used by tzset(3) + # begin with the magic characters "TZif" to identify + # them as time zone information files, followed by + # sixteen bytes reserved for future use, followed by + # six four-byte values of type long, written in a + # ``standard'' byte order (the high-order byte + # of the value is written first). + if fileobj.read(4).decode() != "TZif": + raise ValueError("magic not found") + + fileobj.read(16) + + ( + # The number of UTC/local indicators stored in the file. + ttisgmtcnt, + + # The number of standard/wall indicators stored in the file. + ttisstdcnt, + + # The number of leap seconds for which data is + # stored in the file. + leapcnt, + + # The number of "transition times" for which data + # is stored in the file. + timecnt, + + # The number of "local time types" for which data + # is stored in the file (must not be zero). + typecnt, + + # The number of characters of "time zone + # abbreviation strings" stored in the file. + charcnt, + + ) = struct.unpack(">6l", fileobj.read(24)) + + # The above header is followed by tzh_timecnt four-byte + # values of type long, sorted in ascending order. + # These values are written in ``standard'' byte order. + # Each is used as a transition time (as returned by + # time(2)) at which the rules for computing local time + # change. + + if timecnt: + out.trans_list_utc = list(struct.unpack(">%dl" % timecnt, + fileobj.read(timecnt*4))) + else: + out.trans_list_utc = [] + + # Next come tzh_timecnt one-byte values of type unsigned + # char; each one tells which of the different types of + # ``local time'' types described in the file is associated + # with the same-indexed transition time. These values + # serve as indices into an array of ttinfo structures that + # appears next in the file. + + if timecnt: + out.trans_idx = struct.unpack(">%dB" % timecnt, + fileobj.read(timecnt)) + else: + out.trans_idx = [] + + # Each ttinfo structure is written as a four-byte value + # for tt_gmtoff of type long, in a standard byte + # order, followed by a one-byte value for tt_isdst + # and a one-byte value for tt_abbrind. In each + # structure, tt_gmtoff gives the number of + # seconds to be added to UTC, tt_isdst tells whether + # tm_isdst should be set by localtime(3), and + # tt_abbrind serves as an index into the array of + # time zone abbreviation characters that follow the + # ttinfo structure(s) in the file. 
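The header layout quoted from tzfile(5) above can be checked by hand; a standalone sketch, assuming a zoneinfo file exists at the (system-dependent) path below:

    import struct

    with open("/usr/share/zoneinfo/UTC", "rb") as f:
        if f.read(4) != b"TZif":
            raise ValueError("magic not found")
        f.read(16)  # version byte plus reserved bytes
        (ttisgmtcnt, ttisstdcnt, leapcnt,
         timecnt, typecnt, charcnt) = struct.unpack(">6l", f.read(24))
        print(timecnt, typecnt, charcnt)  # transition / local-time-type / abbreviation counts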
+ + ttinfo = [] + + for i in range(typecnt): + ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) + + abbr = fileobj.read(charcnt).decode() + + # Then there are tzh_leapcnt pairs of four-byte + # values, written in standard byte order; the + # first value of each pair gives the time (as + # returned by time(2)) at which a leap second + # occurs; the second gives the total number of + # leap seconds to be applied after the given time. + # The pairs of values are sorted in ascending order + # by time. + + # Not used, for now (but seek for correct file position) + if leapcnt: + fileobj.seek(leapcnt * 8, os.SEEK_CUR) + + # Then there are tzh_ttisstdcnt standard/wall + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as standard + # time or wall clock time, and are used when + # a time zone file is used in handling POSIX-style + # time zone environment variables. + + if ttisstdcnt: + isstd = struct.unpack(">%db" % ttisstdcnt, + fileobj.read(ttisstdcnt)) + + # Finally, there are tzh_ttisgmtcnt UTC/local + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as UTC or + # local time, and are used when a time zone file + # is used in handling POSIX-style time zone envi- + # ronment variables. + + if ttisgmtcnt: + isgmt = struct.unpack(">%db" % ttisgmtcnt, + fileobj.read(ttisgmtcnt)) + + # Build ttinfo list + out.ttinfo_list = [] + for i in range(typecnt): + gmtoff, isdst, abbrind = ttinfo[i] + gmtoff = _get_supported_offset(gmtoff) + tti = _ttinfo() + tti.offset = gmtoff + tti.dstoffset = datetime.timedelta(0) + tti.delta = datetime.timedelta(seconds=gmtoff) + tti.isdst = isdst + tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)] + tti.isstd = (ttisstdcnt > i and isstd[i] != 0) + tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0) + out.ttinfo_list.append(tti) + + # Replace ttinfo indexes for ttinfo objects. + out.trans_idx = [out.ttinfo_list[idx] for idx in out.trans_idx] + + # Set standard, dst, and before ttinfos. before will be + # used when a given time is before any transitions, + # and will be set to the first non-dst ttinfo, or to + # the first dst, if all of them are dst. + out.ttinfo_std = None + out.ttinfo_dst = None + out.ttinfo_before = None + if out.ttinfo_list: + if not out.trans_list_utc: + out.ttinfo_std = out.ttinfo_first = out.ttinfo_list[0] + else: + for i in range(timecnt-1, -1, -1): + tti = out.trans_idx[i] + if not out.ttinfo_std and not tti.isdst: + out.ttinfo_std = tti + elif not out.ttinfo_dst and tti.isdst: + out.ttinfo_dst = tti + + if out.ttinfo_std and out.ttinfo_dst: + break + else: + if out.ttinfo_dst and not out.ttinfo_std: + out.ttinfo_std = out.ttinfo_dst + + for tti in out.ttinfo_list: + if not tti.isdst: + out.ttinfo_before = tti + break + else: + out.ttinfo_before = out.ttinfo_list[0] + + # Now fix transition times to become relative to wall time. + # + # I'm not sure about this. In my tests, the tz source file + # is setup to wall time, and in the binary file isstd and + # isgmt are off, so it should be in wall time. OTOH, it's + # always in gmt time. Let me know if you have comments + # about this. 
+ lastdst = None + lastoffset = None + lastdstoffset = None + lastbaseoffset = None + out.trans_list = [] + + for i, tti in enumerate(out.trans_idx): + offset = tti.offset + dstoffset = 0 + + if lastdst is not None: + if tti.isdst: + if not lastdst: + dstoffset = offset - lastoffset + + if not dstoffset and lastdstoffset: + dstoffset = lastdstoffset + + tti.dstoffset = datetime.timedelta(seconds=dstoffset) + lastdstoffset = dstoffset + + # If a time zone changes its base offset during a DST transition, + # then you need to adjust by the previous base offset to get the + # transition time in local time. Otherwise you use the current + # base offset. Ideally, I would have some mathematical proof of + # why this is true, but I haven't really thought about it enough. + baseoffset = offset - dstoffset + adjustment = baseoffset + if (lastbaseoffset is not None and baseoffset != lastbaseoffset + and tti.isdst != lastdst): + # The base DST has changed + adjustment = lastbaseoffset + + lastdst = tti.isdst + lastoffset = offset + lastbaseoffset = baseoffset + + out.trans_list.append(out.trans_list_utc[i] + adjustment) + + out.trans_idx = tuple(out.trans_idx) + out.trans_list = tuple(out.trans_list) + out.trans_list_utc = tuple(out.trans_list_utc) + + return out + + def _find_last_transition(self, dt, in_utc=False): + # If there's no list, there are no transitions to find + if not self._trans_list: + return None + + timestamp = _datetime_to_timestamp(dt) + + # Find where the timestamp fits in the transition list - if the + # timestamp is a transition time, it's part of the "after" period. + trans_list = self._trans_list_utc if in_utc else self._trans_list + idx = bisect.bisect_right(trans_list, timestamp) + + # We want to know when the previous transition was, so subtract off 1 + return idx - 1 + + def _get_ttinfo(self, idx): + # For no list or after the last transition, default to _ttinfo_std + if idx is None or (idx + 1) >= len(self._trans_list): + return self._ttinfo_std + + # If there is a list and the time is before it, return _ttinfo_before + if idx < 0: + return self._ttinfo_before + + return self._trans_idx[idx] + + def _find_ttinfo(self, dt): + idx = self._resolve_ambiguous_time(dt) + + return self._get_ttinfo(idx) + + def fromutc(self, dt): + """ + The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`. + + :param dt: + A :py:class:`datetime.datetime` object. + + :raises TypeError: + Raised if ``dt`` is not a :py:class:`datetime.datetime` object. + + :raises ValueError: + Raised if this is called with a ``dt`` which does not have this + ``tzinfo`` attached. + + :return: + Returns a :py:class:`datetime.datetime` object representing the + wall time in ``self``'s time zone. + """ + # These isinstance checks are in datetime.tzinfo, so we'll preserve + # them, even if we don't care about duck typing. + if not isinstance(dt, datetime.datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # First treat UTC as wall time and get the transition we're in. + idx = self._find_last_transition(dt, in_utc=True) + tti = self._get_ttinfo(idx) + + dt_out = dt + datetime.timedelta(seconds=tti.offset) + + fold = self.is_ambiguous(dt_out, idx=idx) + + return enfold(dt_out, fold=int(fold)) + + def is_ambiguous(self, dt, idx=None): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. 
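To see ``fromutc`` and the fold handling above in action (the class docstring recommends ``gettz`` over constructing ``tzfile`` directly), a hedged example around a US fall-back transition; the exact reprs depend on the installed zone data:

    from datetime import datetime
    from dateutil import tz

    NYC = tz.gettz("America/New_York")

    # 2018-11-04 05:30 UTC and 06:30 UTC both read 01:30 on the New York wall
    # clock; fromutc() marks the later, post-transition instant with fold=1.
    first = datetime(2018, 11, 4, 5, 30, tzinfo=tz.UTC).astimezone(NYC)
    second = datetime(2018, 11, 4, 6, 30, tzinfo=tz.UTC).astimezone(NYC)
    print(first, getattr(first, "fold", 0))    # 2018-11-04 01:30:00-04:00 0
    print(second, getattr(second, "fold", 0))  # 2018-11-04 01:30:00-05:00 1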
+ + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + if idx is None: + idx = self._find_last_transition(dt) + + # Calculate the difference in offsets from current to previous + timestamp = _datetime_to_timestamp(dt) + tti = self._get_ttinfo(idx) + + if idx is None or idx <= 0: + return False + + od = self._get_ttinfo(idx - 1).offset - tti.offset + tt = self._trans_list[idx] # Transition time + + return timestamp < tt + od + + def _resolve_ambiguous_time(self, dt): + idx = self._find_last_transition(dt) + + # If we have no transitions, return the index + _fold = self._fold(dt) + if idx is None or idx == 0: + return idx + + # If it's ambiguous and we're in a fold, shift to a different index. + idx_offset = int(not _fold and self.is_ambiguous(dt, idx)) + + return idx - idx_offset + + def utcoffset(self, dt): + if dt is None: + return None + + if not self._ttinfo_std: + return ZERO + + return self._find_ttinfo(dt).delta + + def dst(self, dt): + if dt is None: + return None + + if not self._ttinfo_dst: + return ZERO + + tti = self._find_ttinfo(dt) + + if not tti.isdst: + return ZERO + + # The documentation says that utcoffset()-dst() must + # be constant for every dt. + return tti.dstoffset + + @tzname_in_python2 + def tzname(self, dt): + if not self._ttinfo_std or dt is None: + return None + return self._find_ttinfo(dt).abbr + + def __eq__(self, other): + if not isinstance(other, tzfile): + return NotImplemented + return (self._trans_list == other._trans_list and + self._trans_idx == other._trans_idx and + self._ttinfo_list == other._ttinfo_list) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._filename)) + + def __reduce__(self): + return self.__reduce_ex__(None) + + def __reduce_ex__(self, protocol): + return (self.__class__, (None, self._filename), self.__dict__) + + +class tzrange(tzrangebase): + """ + The ``tzrange`` object is a time zone specified by a set of offsets and + abbreviations, equivalent to the way the ``TZ`` variable can be specified + in POSIX-like systems, but using Python delta objects to specify DST + start, end and offsets. + + :param stdabbr: + The abbreviation for standard time (e.g. ``'EST'``). + + :param stdoffset: + An integer or :class:`datetime.timedelta` object or equivalent + specifying the base offset from UTC. + + If unspecified, +00:00 is used. + + :param dstabbr: + The abbreviation for DST / "Summer" time (e.g. ``'EDT'``). + + If specified, with no other DST information, DST is assumed to occur + and the default behavior or ``dstoffset``, ``start`` and ``end`` is + used. If unspecified and no other DST information is specified, it + is assumed that this zone has no DST. + + If this is unspecified and other DST information is *is* specified, + DST occurs in the zone but the time zone abbreviation is left + unchanged. + + :param dstoffset: + A an integer or :class:`datetime.timedelta` object or equivalent + specifying the UTC offset during DST. If unspecified and any other DST + information is specified, it is assumed to be the STD offset +1 hour. + + :param start: + A :class:`relativedelta.relativedelta` object or equivalent specifying + the time and time of year that daylight savings time starts. 
To + specify, for example, that DST starts at 2AM on the 2nd Sunday in + March, pass: + + ``relativedelta(hours=2, month=3, day=1, weekday=SU(+2))`` + + If unspecified and any other DST information is specified, the default + value is 2 AM on the first Sunday in April. + + :param end: + A :class:`relativedelta.relativedelta` object or equivalent + representing the time and time of year that daylight savings time + ends, with the same specification method as in ``start``. One note is + that this should point to the first time in the *standard* zone, so if + a transition occurs at 2AM in the DST zone and the clocks are set back + 1 hour to 1AM, set the ``hours`` parameter to +1. + + + **Examples:** + + .. testsetup:: tzrange + + from dateutil.tz import tzrange, tzstr + + .. doctest:: tzrange + + >>> tzstr('EST5EDT') == tzrange("EST", -18000, "EDT") + True + + >>> from dateutil.relativedelta import * + >>> range1 = tzrange("EST", -18000, "EDT") + >>> range2 = tzrange("EST", -18000, "EDT", -14400, + ... relativedelta(hours=+2, month=4, day=1, + ... weekday=SU(+1)), + ... relativedelta(hours=+1, month=10, day=31, + ... weekday=SU(-1))) + >>> tzstr('EST5EDT') == range1 == range2 + True + + """ + def __init__(self, stdabbr, stdoffset=None, + dstabbr=None, dstoffset=None, + start=None, end=None): + + global relativedelta + from dateutil import relativedelta + + self._std_abbr = stdabbr + self._dst_abbr = dstabbr + + try: + stdoffset = stdoffset.total_seconds() + except (TypeError, AttributeError): + pass + + try: + dstoffset = dstoffset.total_seconds() + except (TypeError, AttributeError): + pass + + if stdoffset is not None: + self._std_offset = datetime.timedelta(seconds=stdoffset) + else: + self._std_offset = ZERO + + if dstoffset is not None: + self._dst_offset = datetime.timedelta(seconds=dstoffset) + elif dstabbr and stdoffset is not None: + self._dst_offset = self._std_offset + datetime.timedelta(hours=+1) + else: + self._dst_offset = ZERO + + if dstabbr and start is None: + self._start_delta = relativedelta.relativedelta( + hours=+2, month=4, day=1, weekday=relativedelta.SU(+1)) + else: + self._start_delta = start + + if dstabbr and end is None: + self._end_delta = relativedelta.relativedelta( + hours=+1, month=10, day=31, weekday=relativedelta.SU(-1)) + else: + self._end_delta = end + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = bool(self._start_delta) + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. + + :param year: + The year whose transitions you would like to query. + + :return: + Returns a :class:`tuple` of :class:`datetime.datetime` objects, + ``(dston, dstoff)`` for zones with an annual DST transition, or + ``None`` for fixed offset zones. 
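A sketch of ``transitions()`` using post-2007 US rules rather than the pre-2007 defaults described above:

    from dateutil import tz
    from dateutil.relativedelta import relativedelta, SU

    EASTERN = tz.tzrange(
        "EST", -18000, "EDT", -14400,
        start=relativedelta(hours=+2, month=3, day=1, weekday=SU(+2)),
        end=relativedelta(hours=+1, month=11, day=1, weekday=SU(+1)))

    print(EASTERN.transitions(2020))
    # (datetime.datetime(2020, 3, 8, 2, 0), datetime.datetime(2020, 11, 1, 1, 0))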
+ """ + if not self.hasdst: + return None + + base_year = datetime.datetime(year, 1, 1) + + start = base_year + self._start_delta + end = base_year + self._end_delta + + return (start, end) + + def __eq__(self, other): + if not isinstance(other, tzrange): + return NotImplemented + + return (self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr and + self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._start_delta == other._start_delta and + self._end_delta == other._end_delta) + + @property + def _dst_base_offset(self): + return self._dst_base_offset_ + + +@six.add_metaclass(_TzStrFactory) +class tzstr(tzrange): + """ + ``tzstr`` objects are time zone objects specified by a time-zone string as + it would be passed to a ``TZ`` variable on POSIX-style systems (see + the `GNU C Library: TZ Variable`_ for more details). + + There is one notable exception, which is that POSIX-style time zones use an + inverted offset format, so normally ``GMT+3`` would be parsed as an offset + 3 hours *behind* GMT. The ``tzstr`` time zone object will parse this as an + offset 3 hours *ahead* of GMT. If you would like to maintain the POSIX + behavior, pass a ``True`` value to ``posix_offset``. + + The :class:`tzrange` object provides the same functionality, but is + specified using :class:`relativedelta.relativedelta` objects. rather than + strings. + + :param s: + A time zone string in ``TZ`` variable format. This can be a + :class:`bytes` (2.x: :class:`str`), :class:`str` (2.x: + :class:`unicode`) or a stream emitting unicode characters + (e.g. :class:`StringIO`). + + :param posix_offset: + Optional. If set to ``True``, interpret strings such as ``GMT+3`` or + ``UTC+3`` as being 3 hours *behind* UTC rather than ahead, per the + POSIX standard. + + .. caution:: + + Prior to version 2.7.0, this function also supported time zones + in the format: + + * ``EST5EDT,4,0,6,7200,10,0,26,7200,3600`` + * ``EST5EDT,4,1,0,7200,10,-1,0,7200,3600`` + + This format is non-standard and has been deprecated; this function + will raise a :class:`DeprecatedTZFormatWarning` until + support is removed in a future version. + + .. _`GNU C Library: TZ Variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + """ + def __init__(self, s, posix_offset=False): + global parser + from dateutil.parser import _parser as parser + + self._s = s + + res = parser._parsetz(s) + if res is None or res.any_unused_tokens: + raise ValueError("unknown string format") + + # Here we break the compatibility with the TZ variable handling. + # GMT-3 actually *means* the timezone -3. + if res.stdabbr in ("GMT", "UTC") and not posix_offset: + res.stdoffset *= -1 + + # We must initialize it first, since _delta() needs + # _std_offset and _dst_offset set. Use False in start/end + # to avoid building it two times. 
+ tzrange.__init__(self, res.stdabbr, res.stdoffset, + res.dstabbr, res.dstoffset, + start=False, end=False) + + if not res.dstabbr: + self._start_delta = None + self._end_delta = None + else: + self._start_delta = self._delta(res.start) + if self._start_delta: + self._end_delta = self._delta(res.end, isend=1) + + self.hasdst = bool(self._start_delta) + + def _delta(self, x, isend=0): + from dateutil import relativedelta + kwargs = {} + if x.month is not None: + kwargs["month"] = x.month + if x.weekday is not None: + kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week) + if x.week > 0: + kwargs["day"] = 1 + else: + kwargs["day"] = 31 + elif x.day: + kwargs["day"] = x.day + elif x.yday is not None: + kwargs["yearday"] = x.yday + elif x.jyday is not None: + kwargs["nlyearday"] = x.jyday + if not kwargs: + # Default is to start on first sunday of april, and end + # on last sunday of october. + if not isend: + kwargs["month"] = 4 + kwargs["day"] = 1 + kwargs["weekday"] = relativedelta.SU(+1) + else: + kwargs["month"] = 10 + kwargs["day"] = 31 + kwargs["weekday"] = relativedelta.SU(-1) + if x.time is not None: + kwargs["seconds"] = x.time + else: + # Default is 2AM. + kwargs["seconds"] = 7200 + if isend: + # Convert to standard time, to follow the documented way + # of working with the extra hour. See the documentation + # of the tzinfo class. + delta = self._dst_offset - self._std_offset + kwargs["seconds"] -= delta.seconds + delta.days * 86400 + return relativedelta.relativedelta(**kwargs) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +class _tzicalvtzcomp(object): + def __init__(self, tzoffsetfrom, tzoffsetto, isdst, + tzname=None, rrule=None): + self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom) + self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto) + self.tzoffsetdiff = self.tzoffsetto - self.tzoffsetfrom + self.isdst = isdst + self.tzname = tzname + self.rrule = rrule + + +class _tzicalvtz(_tzinfo): + def __init__(self, tzid, comps=[]): + super(_tzicalvtz, self).__init__() + + self._tzid = tzid + self._comps = comps + self._cachedate = [] + self._cachecomp = [] + self._cache_lock = _thread.allocate_lock() + + def _find_comp(self, dt): + if len(self._comps) == 1: + return self._comps[0] + + dt = dt.replace(tzinfo=None) + + try: + with self._cache_lock: + return self._cachecomp[self._cachedate.index( + (dt, self._fold(dt)))] + except ValueError: + pass + + lastcompdt = None + lastcomp = None + + for comp in self._comps: + compdt = self._find_compdt(comp, dt) + + if compdt and (not lastcompdt or lastcompdt < compdt): + lastcompdt = compdt + lastcomp = comp + + if not lastcomp: + # RFC says nothing about what to do when a given + # time is before the first onset date. We'll look for the + # first standard component, or the first component, if + # none is found. 
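A short example of ``tzstr`` with a full GNU ``TZ`` specification that carries explicit transition rules (so the April/October defaults above are not used):

    from datetime import datetime
    from dateutil import tz

    EASTERN = tz.tzstr("EST5EDT,M3.2.0/2,M11.1.0/2")
    print(datetime(2020, 1, 15, 12, tzinfo=EASTERN))   # 2020-01-15 12:00:00-05:00
    print(datetime(2020, 7, 15, 12, tzinfo=EASTERN))   # 2020-07-15 12:00:00-04:00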
+ for comp in self._comps: + if not comp.isdst: + lastcomp = comp + break + else: + lastcomp = comp[0] + + with self._cache_lock: + self._cachedate.insert(0, (dt, self._fold(dt))) + self._cachecomp.insert(0, lastcomp) + + if len(self._cachedate) > 10: + self._cachedate.pop() + self._cachecomp.pop() + + return lastcomp + + def _find_compdt(self, comp, dt): + if comp.tzoffsetdiff < ZERO and self._fold(dt): + dt -= comp.tzoffsetdiff + + compdt = comp.rrule.before(dt, inc=True) + + return compdt + + def utcoffset(self, dt): + if dt is None: + return None + + return self._find_comp(dt).tzoffsetto + + def dst(self, dt): + comp = self._find_comp(dt) + if comp.isdst: + return comp.tzoffsetdiff + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._find_comp(dt).tzname + + def __repr__(self): + return "" % repr(self._tzid) + + __reduce__ = object.__reduce__ + + +class tzical(object): + """ + This object is designed to parse an iCalendar-style ``VTIMEZONE`` structure + as set out in `RFC 5545`_ Section 4.6.5 into one or more `tzinfo` objects. + + :param `fileobj`: + A file or stream in iCalendar format, which should be UTF-8 encoded + with CRLF endings. + + .. _`RFC 5545`: https://tools.ietf.org/html/rfc5545 + """ + def __init__(self, fileobj): + global rrule + from dateutil import rrule + + if isinstance(fileobj, string_types): + self._s = fileobj + # ical should be encoded in UTF-8 with CRLF + fileobj = open(fileobj, 'r') + else: + self._s = getattr(fileobj, 'name', repr(fileobj)) + fileobj = _nullcontext(fileobj) + + self._vtz = {} + + with fileobj as fobj: + self._parse_rfc(fobj.read()) + + def keys(self): + """ + Retrieves the available time zones as a list. + """ + return list(self._vtz.keys()) + + def get(self, tzid=None): + """ + Retrieve a :py:class:`datetime.tzinfo` object by its ``tzid``. + + :param tzid: + If there is exactly one time zone available, omitting ``tzid`` + or passing :py:const:`None` value returns it. Otherwise a valid + key (which can be retrieved from :func:`keys`) is required. + + :raises ValueError: + Raised if ``tzid`` is not specified but there are either more + or fewer than 1 zone defined. + + :returns: + Returns either a :py:class:`datetime.tzinfo` object representing + the relevant time zone or :py:const:`None` if the ``tzid`` was + not found. 
+ """ + if tzid is None: + if len(self._vtz) == 0: + raise ValueError("no timezones defined") + elif len(self._vtz) > 1: + raise ValueError("more than one timezone available") + tzid = next(iter(self._vtz)) + + return self._vtz.get(tzid) + + def _parse_offset(self, s): + s = s.strip() + if not s: + raise ValueError("empty offset") + if s[0] in ('+', '-'): + signal = (-1, +1)[s[0] == '+'] + s = s[1:] + else: + signal = +1 + if len(s) == 4: + return (int(s[:2]) * 3600 + int(s[2:]) * 60) * signal + elif len(s) == 6: + return (int(s[:2]) * 3600 + int(s[2:4]) * 60 + int(s[4:])) * signal + else: + raise ValueError("invalid offset: " + s) + + def _parse_rfc(self, s): + lines = s.splitlines() + if not lines: + raise ValueError("empty string") + + # Unfold + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + + tzid = None + comps = [] + invtz = False + comptype = None + for line in lines: + if not line: + continue + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0].upper() + parms = parms[1:] + if invtz: + if name == "BEGIN": + if value in ("STANDARD", "DAYLIGHT"): + # Process component + pass + else: + raise ValueError("unknown component: "+value) + comptype = value + founddtstart = False + tzoffsetfrom = None + tzoffsetto = None + rrulelines = [] + tzname = None + elif name == "END": + if value == "VTIMEZONE": + if comptype: + raise ValueError("component not closed: "+comptype) + if not tzid: + raise ValueError("mandatory TZID not found") + if not comps: + raise ValueError( + "at least one component is needed") + # Process vtimezone + self._vtz[tzid] = _tzicalvtz(tzid, comps) + invtz = False + elif value == comptype: + if not founddtstart: + raise ValueError("mandatory DTSTART not found") + if tzoffsetfrom is None: + raise ValueError( + "mandatory TZOFFSETFROM not found") + if tzoffsetto is None: + raise ValueError( + "mandatory TZOFFSETFROM not found") + # Process component + rr = None + if rrulelines: + rr = rrule.rrulestr("\n".join(rrulelines), + compatible=True, + ignoretz=True, + cache=True) + comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto, + (comptype == "DAYLIGHT"), + tzname, rr) + comps.append(comp) + comptype = None + else: + raise ValueError("invalid component end: "+value) + elif comptype: + if name == "DTSTART": + # DTSTART in VTIMEZONE takes a subset of valid RRULE + # values under RFC 5545. 
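Typical ``tzical`` usage, sketched with a hypothetical ``zones.ics`` file containing one or more VTIMEZONE blocks:

    from dateutil import tz

    ical = tz.tzical("zones.ics")   # hypothetical iCalendar file, UTF-8 with CRLF endings
    print(ical.keys())              # available TZIDs, e.g. ['America/New_York']
    zone = ical.get()               # valid without an argument only when exactly one TZID exists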
+ for parm in parms: + if parm != 'VALUE=DATE-TIME': + msg = ('Unsupported DTSTART param in ' + + 'VTIMEZONE: ' + parm) + raise ValueError(msg) + rrulelines.append(line) + founddtstart = True + elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"): + rrulelines.append(line) + elif name == "TZOFFSETFROM": + if parms: + raise ValueError( + "unsupported %s parm: %s " % (name, parms[0])) + tzoffsetfrom = self._parse_offset(value) + elif name == "TZOFFSETTO": + if parms: + raise ValueError( + "unsupported TZOFFSETTO parm: "+parms[0]) + tzoffsetto = self._parse_offset(value) + elif name == "TZNAME": + if parms: + raise ValueError( + "unsupported TZNAME parm: "+parms[0]) + tzname = value + elif name == "COMMENT": + pass + else: + raise ValueError("unsupported property: "+name) + else: + if name == "TZID": + if parms: + raise ValueError( + "unsupported TZID parm: "+parms[0]) + tzid = value + elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"): + pass + else: + raise ValueError("unsupported property: "+name) + elif name == "BEGIN" and value == "VTIMEZONE": + tzid = None + comps = [] + invtz = True + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +if sys.platform != "win32": + TZFILES = ["/etc/localtime", "localtime"] + TZPATHS = ["/usr/share/zoneinfo", + "/usr/lib/zoneinfo", + "/usr/share/lib/zoneinfo", + "/etc/zoneinfo"] +else: + TZFILES = [] + TZPATHS = [] + + +def __get_gettz(): + tzlocal_classes = (tzlocal,) + if tzwinlocal is not None: + tzlocal_classes += (tzwinlocal,) + + class GettzFunc(object): + """ + Retrieve a time zone object from a string representation + + This function is intended to retrieve the :py:class:`tzinfo` subclass + that best represents the time zone that would be used if a POSIX + `TZ variable`_ were set to the same value. + + If no argument or an empty string is passed to ``gettz``, local time + is returned: + + .. code-block:: python3 + + >>> gettz() + tzfile('/etc/localtime') + + This function is also the preferred way to map IANA tz database keys + to :class:`tzfile` objects: + + .. code-block:: python3 + + >>> gettz('Pacific/Kiritimati') + tzfile('/usr/share/zoneinfo/Pacific/Kiritimati') + + On Windows, the standard is extended to include the Windows-specific + zone names provided by the operating system: + + .. code-block:: python3 + + >>> gettz('Egypt Standard Time') + tzwin('Egypt Standard Time') + + Passing a GNU ``TZ`` style string time zone specification returns a + :class:`tzstr` object: + + .. code-block:: python3 + + >>> gettz('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + tzstr('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + + :param name: + A time zone name (IANA, or, on Windows, Windows keys), location of + a ``tzfile(5)`` zoneinfo file or ``TZ`` variable style time zone + specifier. An empty string, no argument or ``None`` is interpreted + as local time. + + :return: + Returns an instance of one of ``dateutil``'s :py:class:`tzinfo` + subclasses. + + .. versionchanged:: 2.7.0 + + After version 2.7.0, any two calls to ``gettz`` using the same + input strings will return the same object: + + .. code-block:: python3 + + >>> tz.gettz('America/Chicago') is tz.gettz('America/Chicago') + True + + In addition to improving performance, this ensures that + `"same zone" semantics`_ are used for datetimes in the same zone. + + + .. _`TZ variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + + .. 
_`"same zone" semantics`: + https://blog.ganssle.io/articles/2018/02/aware-datetime-arithmetic.html + """ + def __init__(self): + + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache_size = 8 + self.__strong_cache = OrderedDict() + self._cache_lock = _thread.allocate_lock() + + def __call__(self, name=None): + with self._cache_lock: + rv = self.__instances.get(name, None) + + if rv is None: + rv = self.nocache(name=name) + if not (name is None + or isinstance(rv, tzlocal_classes) + or rv is None): + # tzlocal is slightly more complicated than the other + # time zone providers because it depends on environment + # at construction time, so don't cache that. + # + # We also cannot store weak references to None, so we + # will also not store that. + self.__instances[name] = rv + else: + # No need for strong caching, return immediately + return rv + + self.__strong_cache[name] = self.__strong_cache.pop(name, rv) + + if len(self.__strong_cache) > self.__strong_cache_size: + self.__strong_cache.popitem(last=False) + + return rv + + def set_cache_size(self, size): + with self._cache_lock: + self.__strong_cache_size = size + while len(self.__strong_cache) > size: + self.__strong_cache.popitem(last=False) + + def cache_clear(self): + with self._cache_lock: + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache.clear() + + @staticmethod + def nocache(name=None): + """A non-cached version of gettz""" + tz = None + if not name: + try: + name = os.environ["TZ"] + except KeyError: + pass + if name is None or name == ":": + for filepath in TZFILES: + if not os.path.isabs(filepath): + filename = filepath + for path in TZPATHS: + filepath = os.path.join(path, filename) + if os.path.isfile(filepath): + break + else: + continue + if os.path.isfile(filepath): + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = tzlocal() + else: + if name.startswith(":"): + name = name[1:] + if os.path.isabs(name): + if os.path.isfile(name): + tz = tzfile(name) + else: + tz = None + else: + for path in TZPATHS: + filepath = os.path.join(path, name) + if not os.path.isfile(filepath): + filepath = filepath.replace(' ', '_') + if not os.path.isfile(filepath): + continue + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = None + if tzwin is not None: + try: + tz = tzwin(name) + except (WindowsError, UnicodeEncodeError): + # UnicodeEncodeError is for Python 2.7 compat + tz = None + + if not tz: + from dateutil.zoneinfo import get_zonefile_instance + tz = get_zonefile_instance().get(name) + + if not tz: + for c in name: + # name is not a tzstr unless it has at least + # one offset. For short values of "name", an + # explicit for loop seems to be the fastest way + # To determine if a string contains a digit + if c in "0123456789": + try: + tz = tzstr(name) + except ValueError: + pass + break + else: + if name in ("GMT", "UTC"): + tz = tzutc() + elif name in time.tzname: + tz = tzlocal() + return tz + + return GettzFunc() + + +gettz = __get_gettz() +del __get_gettz + + +def datetime_exists(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + would fall in a gap. + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) + + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. 
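Pulling the ``nocache`` lookup order above together, a hedged summary of the name forms ``gettz`` resolves; which branch is taken depends on the host's zoneinfo installation, and the paths below are illustrative:

    from dateutil import tz

    tz.gettz()                                       # local zone (TZ env var, else /etc/localtime)
    tz.gettz("America/New_York")                     # IANA key via TZPATHS, else packaged zoneinfo
    tz.gettz("/usr/share/zoneinfo/Asia/Tokyo")       # absolute path to a tzfile(5) file
    tz.gettz("AEST-10AEDT-11,M10.1.0/2,M4.1.0/3")    # GNU TZ string -> tzstr
    tz.gettz("UTC")                                  # zoneinfo file if present, else tzutc()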
+ + :return: + Returns a boolean value whether or not the "wall time" exists in + ``tz``. + + .. versionadded:: 2.7.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + tz = dt.tzinfo + + dt = dt.replace(tzinfo=None) + + # This is essentially a test of whether or not the datetime can survive + # a round trip to UTC. + dt_rt = dt.replace(tzinfo=tz).astimezone(tzutc()).astimezone(tz) + dt_rt = dt_rt.replace(tzinfo=None) + + return dt == dt_rt + + +def datetime_ambiguous(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + is ambiguous (i.e if there are two times differentiated only by their DST + status). + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) + + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. + + :return: + Returns a boolean value whether or not the "wall time" is ambiguous in + ``tz``. + + .. versionadded:: 2.6.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + + tz = dt.tzinfo + + # If a time zone defines its own "is_ambiguous" function, we'll use that. + is_ambiguous_fn = getattr(tz, 'is_ambiguous', None) + if is_ambiguous_fn is not None: + try: + return tz.is_ambiguous(dt) + except Exception: + pass + + # If it doesn't come out and tell us it's ambiguous, we'll just check if + # the fold attribute has any effect on this particular date and time. + dt = dt.replace(tzinfo=tz) + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dst = wall_0.dst() == wall_1.dst() + + return not (same_offset and same_dst) + + +def resolve_imaginary(dt): + """ + Given a datetime that may be imaginary, return an existing datetime. + + This function assumes that an imaginary datetime represents what the + wall time would be in a zone had the offset transition not occurred, so + it will always fall forward by the transition's change in offset. + + .. doctest:: + + >>> from dateutil import tz + >>> from datetime import datetime + >>> NYC = tz.gettz('America/New_York') + >>> print(tz.resolve_imaginary(datetime(2017, 3, 12, 2, 30, tzinfo=NYC))) + 2017-03-12 03:30:00-04:00 + + >>> KIR = tz.gettz('Pacific/Kiritimati') + >>> print(tz.resolve_imaginary(datetime(1995, 1, 1, 12, 30, tzinfo=KIR))) + 1995-01-02 12:30:00+14:00 + + As a note, :func:`datetime.astimezone` is guaranteed to produce a valid, + existing datetime, so a round-trip to and from UTC is sufficient to get + an extant datetime, however, this generally "falls back" to an earlier time + rather than falling forward to the STD side (though no guarantees are made + about this behavior). + + :param dt: + A :class:`datetime.datetime` which may or may not exist. + + :return: + Returns an existing :class:`datetime.datetime`. If ``dt`` was not + imaginary, the datetime returned is guaranteed to be the same object + passed to the function. + + .. 
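The two helpers above pair naturally; a small example around the 2017 US transitions, assuming ``gettz`` can find the IANA data:

    from datetime import datetime
    from dateutil import tz

    NYC = tz.gettz("America/New_York")

    # 02:30 on 2017-03-12 was skipped by the spring-forward transition
    print(tz.datetime_exists(datetime(2017, 3, 12, 2, 30), tz=NYC))      # False

    # 01:30 on 2017-11-05 occurred twice because of the fall-back transition
    print(tz.datetime_ambiguous(datetime(2017, 11, 5, 1, 30), tz=NYC))   # True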
versionadded:: 2.7.0 + """ + if dt.tzinfo is not None and not datetime_exists(dt): + + curr_offset = (dt + datetime.timedelta(hours=24)).utcoffset() + old_offset = (dt - datetime.timedelta(hours=24)).utcoffset() + + dt += curr_offset - old_offset + + return dt + + +def _datetime_to_timestamp(dt): + """ + Convert a :class:`datetime.datetime` object to an epoch timestamp in + seconds since January 1, 1970, ignoring the time zone. + """ + return (dt.replace(tzinfo=None) - EPOCH).total_seconds() + + +if sys.version_info >= (3, 6): + def _get_supported_offset(second_offset): + return second_offset +else: + def _get_supported_offset(second_offset): + # For python pre-3.6, round to full-minutes if that's not the case. + # Python's datetime doesn't accept sub-minute timezones. Check + # http://python.org/sf/1447945 or https://bugs.python.org/issue5288 + # for some information. + old_offset = second_offset + calculated_offset = 60 * ((second_offset + 30) // 60) + return calculated_offset + + +try: + # Python 3.7 feature + from contextmanager import nullcontext as _nullcontext +except ImportError: + class _nullcontext(object): + """ + Class for wrapping contexts so that they are passed through in a + with statement. + """ + def __init__(self, context): + self.context = context + + def __enter__(self): + return self.context + + def __exit__(*args, **kwargs): + pass + +# vim:ts=4:sw=4:et diff --git a/py3/lib/python3.6/site-packages/dateutil/tz/win.py b/py3/lib/python3.6/site-packages/dateutil/tz/win.py new file mode 100644 index 0000000..cde07ba --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/tz/win.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +""" +This module provides an interface to the native time zone data on Windows, +including :py:class:`datetime.tzinfo` implementations. + +Attempting to import this module on a non-Windows platform will raise an +:py:obj:`ImportError`. +""" +# This code was originally contributed by Jeffrey Harris. +import datetime +import struct + +from six.moves import winreg +from six import text_type + +try: + import ctypes + from ctypes import wintypes +except ValueError: + # ValueError is raised on non-Windows systems for some horrible reason. + raise ImportError("Running tzwin on non-Windows system") + +from ._common import tzrangebase + +__all__ = ["tzwin", "tzwinlocal", "tzres"] + +ONEWEEK = datetime.timedelta(7) + +TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" +TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones" +TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" + + +def _settzkeyname(): + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + winreg.OpenKey(handle, TZKEYNAMENT).Close() + TZKEYNAME = TZKEYNAMENT + except WindowsError: + TZKEYNAME = TZKEYNAME9X + handle.Close() + return TZKEYNAME + + +TZKEYNAME = _settzkeyname() + + +class tzres(object): + """ + Class for accessing ``tzres.dll``, which contains timezone name related + resources. + + .. 
versionadded:: 2.5.0 + """ + p_wchar = ctypes.POINTER(wintypes.WCHAR) # Pointer to a wide char + + def __init__(self, tzres_loc='tzres.dll'): + # Load the user32 DLL so we can load strings from tzres + user32 = ctypes.WinDLL('user32') + + # Specify the LoadStringW function + user32.LoadStringW.argtypes = (wintypes.HINSTANCE, + wintypes.UINT, + wintypes.LPWSTR, + ctypes.c_int) + + self.LoadStringW = user32.LoadStringW + self._tzres = ctypes.WinDLL(tzres_loc) + self.tzres_loc = tzres_loc + + def load_name(self, offset): + """ + Load a timezone name from a DLL offset (integer). + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.load_name(112)) + 'Eastern Standard Time' + + :param offset: + A positive integer value referring to a string from the tzres dll. + + .. note:: + + Offsets found in the registry are generally of the form + ``@tzres.dll,-114``. The offset in this case is 114, not -114. + + """ + resource = self.p_wchar() + lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR) + nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0) + return resource[:nchar] + + def name_from_string(self, tzname_str): + """ + Parse strings as returned from the Windows registry into the time zone + name as defined in the registry. + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.name_from_string('@tzres.dll,-251')) + 'Dateline Daylight Time' + >>> print(tzr.name_from_string('Eastern Standard Time')) + 'Eastern Standard Time' + + :param tzname_str: + A timezone name string as returned from a Windows registry key. + + :return: + Returns the localized timezone string from tzres.dll if the string + is of the form `@tzres.dll,-offset`, else returns the input string. + """ + if not tzname_str.startswith('@'): + return tzname_str + + name_splt = tzname_str.split(',-') + try: + offset = int(name_splt[1]) + except: + raise ValueError("Malformed timezone string.") + + return self.load_name(offset) + + +class tzwinbase(tzrangebase): + """tzinfo class based on win32's timezones available in the registry.""" + def __init__(self): + raise NotImplementedError('tzwinbase is an abstract base class') + + def __eq__(self, other): + # Compare on all relevant dimensions, including name. + if not isinstance(other, tzwinbase): + return NotImplemented + + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._stddayofweek == other._stddayofweek and + self._dstdayofweek == other._dstdayofweek and + self._stdweeknumber == other._stdweeknumber and + self._dstweeknumber == other._dstweeknumber and + self._stdhour == other._stdhour and + self._dsthour == other._dsthour and + self._stdminute == other._stdminute and + self._dstminute == other._dstminute and + self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr) + + @staticmethod + def list(): + """Return a list of all time zones known to the system.""" + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZKEYNAME) as tzkey: + result = [winreg.EnumKey(tzkey, i) + for i in range(winreg.QueryInfoKey(tzkey)[0])] + return result + + def display(self): + """ + Return the display name of the time zone. + """ + return self._display + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. 
+ + :param year: + The year whose transitions you would like to query. + + :return: + Returns a :class:`tuple` of :class:`datetime.datetime` objects, + ``(dston, dstoff)`` for zones with an annual DST transition, or + ``None`` for fixed offset zones. + """ + + if not self.hasdst: + return None + + dston = picknthweekday(year, self._dstmonth, self._dstdayofweek, + self._dsthour, self._dstminute, + self._dstweeknumber) + + dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek, + self._stdhour, self._stdminute, + self._stdweeknumber) + + # Ambiguous dates default to the STD side + dstoff -= self._dst_base_offset + + return dston, dstoff + + def _get_hasdst(self): + return self._dstmonth != 0 + + @property + def _dst_base_offset(self): + return self._dst_base_offset_ + + +class tzwin(tzwinbase): + """ + Time zone object created from the zone info in the Windows registry + + These are similar to :py:class:`dateutil.tz.tzrange` objects in that + the time zone data is provided in the format of a single offset rule + for either 0 or 2 time zone transitions per year. + + :param: name + The name of a Windows time zone key, e.g. "Eastern Standard Time". + The full list of keys can be retrieved with :func:`tzwin.list`. + """ + + def __init__(self, name): + self._name = name + + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + keydict = valuestodict(tzkey) + + self._std_abbr = keydict["Std"] + self._dst_abbr = keydict["Dlt"] + + self._display = keydict["Display"] + + # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm + tup = struct.unpack("=3l16h", keydict["TZI"]) + stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1 + dstoffset = stdoffset-tup[2] # + DaylightBias * -1 + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs + # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx + (self._stdmonth, + self._stddayofweek, # Sunday = 0 + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[4:9] + + (self._dstmonth, + self._dstdayofweek, # Sunday = 0 + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[12:17] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwin(%s)" % repr(self._name) + + def __reduce__(self): + return (self.__class__, (self._name,)) + + +class tzwinlocal(tzwinbase): + """ + Class representing the local time zone information in the Windows registry + + While :class:`dateutil.tz.tzlocal` makes system calls (via the :mod:`time` + module) to retrieve time zone information, ``tzwinlocal`` retrieves the + rules directly from the Windows registry and creates an object like + :class:`dateutil.tz.tzwin`. + + Because Windows does not have an equivalent of :func:`time.tzset`, on + Windows, :class:`dateutil.tz.tzlocal` instances will always reflect the + time zone settings *at the time that the process was started*, meaning + changes to the machine's time zone settings during the run of a program + on Windows will **not** be reflected by :class:`dateutil.tz.tzlocal`. + Because ``tzwinlocal`` reads the registry directly, it is unaffected by + this issue. 
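A Windows-only sketch of the registry-backed classes above; on other platforms ``dateutil.tz`` exposes ``tzwin``/``tzwinlocal`` as ``None``, so none of this runs there:

    from datetime import datetime
    from dateutil.tz import tzwin, tzwinlocal

    print(tzwin.list()[:3])          # registry key names, e.g. 'Eastern Standard Time'

    eastern = tzwin("Eastern Standard Time")
    print(eastern.display(), eastern.transitions(2020))

    # tzwinlocal reads the registry at construction time, per the note above
    print(datetime.now(tzwinlocal()).tzname())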
+ """ + def __init__(self): + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey: + keydict = valuestodict(tzlocalkey) + + self._std_abbr = keydict["StandardName"] + self._dst_abbr = keydict["DaylightName"] + + try: + tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME, + sn=self._std_abbr) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + _keydict = valuestodict(tzkey) + self._display = _keydict["Display"] + except OSError: + self._display = None + + stdoffset = -keydict["Bias"]-keydict["StandardBias"] + dstoffset = stdoffset-keydict["DaylightBias"] + + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # For reasons unclear, in this particular key, the day of week has been + # moved to the END of the SYSTEMTIME structure. + tup = struct.unpack("=8h", keydict["StandardStart"]) + + (self._stdmonth, + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[1:5] + + self._stddayofweek = tup[7] + + tup = struct.unpack("=8h", keydict["DaylightStart"]) + + (self._dstmonth, + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[1:5] + + self._dstdayofweek = tup[7] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwinlocal()" + + def __str__(self): + # str will return the standard name, not the daylight name. + return "tzwinlocal(%s)" % repr(self._std_abbr) + + def __reduce__(self): + return (self.__class__, ()) + + +def picknthweekday(year, month, dayofweek, hour, minute, whichweek): + """ dayofweek == 0 means Sunday, whichweek 5 means last instance """ + first = datetime.datetime(year, month, 1, hour, minute) + + # This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6), + # Because 7 % 7 = 0 + weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1) + wd = weekdayone + ((whichweek - 1) * ONEWEEK) + if (wd.month != month): + wd -= ONEWEEK + + return wd + + +def valuestodict(key): + """Convert a registry key's values to a dictionary.""" + dout = {} + size = winreg.QueryInfoKey(key)[1] + tz_res = None + + for i in range(size): + key_name, value, dtype = winreg.EnumValue(key, i) + if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN: + # If it's a DWORD (32-bit integer), it's stored as unsigned - convert + # that to a proper signed integer + if value & (1 << 31): + value = value - (1 << 32) + elif dtype == winreg.REG_SZ: + # If it's a reference to the tzres DLL, load the actual string + if value.startswith('@tzres'): + tz_res = tz_res or tzres() + value = tz_res.name_from_string(value) + + value = value.rstrip('\x00') # Remove trailing nulls + + dout[key_name] = value + + return dout diff --git a/py3/lib/python3.6/site-packages/dateutil/tzwin.py b/py3/lib/python3.6/site-packages/dateutil/tzwin.py new file mode 100644 index 0000000..cebc673 --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/tzwin.py @@ -0,0 +1,2 @@ +# tzwin has moved to dateutil.tz.win +from .tz.win import * diff --git a/py3/lib/python3.6/site-packages/dateutil/utils.py b/py3/lib/python3.6/site-packages/dateutil/utils.py new file mode 100644 index 0000000..ebcce6a --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/utils.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +""" +This module offers general convenience and utility functions for dealing with +datetimes. + +.. 
versionadded:: 2.7.0 +""" +from __future__ import unicode_literals + +from datetime import datetime, time + + +def today(tzinfo=None): + """ + Returns a :py:class:`datetime` representing the current day at midnight + + :param tzinfo: + The time zone to attach (also used to determine the current day). + + :return: + A :py:class:`datetime.datetime` object representing the current day + at midnight. + """ + + dt = datetime.now(tzinfo) + return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) + + +def default_tzinfo(dt, tzinfo): + """ + Sets the the ``tzinfo`` parameter on naive datetimes only + + This is useful for example when you are provided a datetime that may have + either an implicit or explicit time zone, such as when parsing a time zone + string. + + .. doctest:: + + >>> from dateutil.tz import tzoffset + >>> from dateutil.parser import parse + >>> from dateutil.utils import default_tzinfo + >>> dflt_tz = tzoffset("EST", -18000) + >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz)) + 2014-01-01 12:30:00+00:00 + >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz)) + 2014-01-01 12:30:00-05:00 + + :param dt: + The datetime on which to replace the time zone + + :param tzinfo: + The :py:class:`datetime.tzinfo` subclass instance to assign to + ``dt`` if (and only if) it is naive. + + :return: + Returns an aware :py:class:`datetime.datetime`. + """ + if dt.tzinfo is not None: + return dt + else: + return dt.replace(tzinfo=tzinfo) + + +def within_delta(dt1, dt2, delta): + """ + Useful for comparing two datetimes that may a negilible difference + to be considered equal. + """ + delta = abs(delta) + difference = dt1 - dt2 + return -delta <= difference <= delta diff --git a/py3/lib/python3.6/site-packages/dateutil/zoneinfo/__init__.py b/py3/lib/python3.6/site-packages/dateutil/zoneinfo/__init__.py new file mode 100644 index 0000000..34f11ad --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/zoneinfo/__init__.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +import warnings +import json + +from tarfile import TarFile +from pkgutil import get_data +from io import BytesIO + +from dateutil.tz import tzfile as _tzfile + +__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"] + +ZONEFILENAME = "dateutil-zoneinfo.tar.gz" +METADATA_FN = 'METADATA' + + +class tzfile(_tzfile): + def __reduce__(self): + return (gettz, (self._filename,)) + + +def getzoneinfofile_stream(): + try: + return BytesIO(get_data(__name__, ZONEFILENAME)) + except IOError as e: # TODO switch to FileNotFoundError? + warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror)) + return None + + +class ZoneInfoFile(object): + def __init__(self, zonefile_stream=None): + if zonefile_stream is not None: + with TarFile.open(fileobj=zonefile_stream) as tf: + self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name) + for zf in tf.getmembers() + if zf.isfile() and zf.name != METADATA_FN} + # deal with links: They'll point to their parent object. Less + # waste of memory + links = {zl.name: self.zones[zl.linkname] + for zl in tf.getmembers() if + zl.islnk() or zl.issym()} + self.zones.update(links) + try: + metadata_json = tf.extractfile(tf.getmember(METADATA_FN)) + metadata_str = metadata_json.read().decode('UTF-8') + self.metadata = json.loads(metadata_str) + except KeyError: + # no metadata in tar file + self.metadata = None + else: + self.zones = {} + self.metadata = None + + def get(self, name, default=None): + """ + Wrapper for :func:`ZoneInfoFile.zones.get`. 
This is a convenience method + for retrieving zones from the zone dictionary. + + :param name: + The name of the zone to retrieve. (Generally IANA zone names) + + :param default: + The value to return in the event of a missing key. + + .. versionadded:: 2.6.0 + + """ + return self.zones.get(name, default) + + +# The current API has gettz as a module function, although in fact it taps into +# a stateful class. So as a workaround for now, without changing the API, we +# will create a new "global" class instance the first time a user requests a +# timezone. Ugly, but adheres to the api. +# +# TODO: Remove after deprecation period. +_CLASS_ZONE_INSTANCE = [] + + +def get_zonefile_instance(new_instance=False): + """ + This is a convenience function which provides a :class:`ZoneInfoFile` + instance using the data provided by the ``dateutil`` package. By default, it + caches a single instance of the ZoneInfoFile object and returns that. + + :param new_instance: + If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and + used as the cached instance for the next call. Otherwise, new instances + are created only as necessary. + + :return: + Returns a :class:`ZoneInfoFile` object. + + .. versionadded:: 2.6 + """ + if new_instance: + zif = None + else: + zif = getattr(get_zonefile_instance, '_cached_instance', None) + + if zif is None: + zif = ZoneInfoFile(getzoneinfofile_stream()) + + get_zonefile_instance._cached_instance = zif + + return zif + + +def gettz(name): + """ + This retrieves a time zone from the local zoneinfo tarball that is packaged + with dateutil. + + :param name: + An IANA-style time zone name, as found in the zoneinfo file. + + :return: + Returns a :class:`dateutil.tz.tzfile` time zone object. + + .. warning:: + It is generally inadvisable to use this function, and it is only + provided for API compatibility with earlier versions. This is *not* + equivalent to ``dateutil.tz.gettz()``, which selects an appropriate + time zone based on the inputs, favoring system zoneinfo. This is ONLY + for accessing the dateutil-specific zoneinfo (which may be out of + date compared to the system zoneinfo). + + .. deprecated:: 2.6 + If you need to use a specific zoneinfofile over the system zoneinfo, + instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call + :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead. + + Use :func:`get_zonefile_instance` to retrieve an instance of the + dateutil-provided zoneinfo. + """ + warnings.warn("zoneinfo.gettz() will be removed in future versions, " + "to use the dateutil-provided zoneinfo files, instantiate a " + "ZoneInfoFile object and use ZoneInfoFile.zones.get() " + "instead. See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].zones.get(name) + + +def gettz_db_metadata(): + """ Get the zonefile metadata + + See `zonefile_metadata`_ + + :returns: + A dictionary with the database metadata + + .. deprecated:: 2.6 + See deprecation warning in :func:`zoneinfo.gettz`. To get metadata, + query the attribute ``zoneinfo.ZoneInfoFile.metadata``. + """ + warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future " + "versions, to use the dateutil-provided zoneinfo files, " + "ZoneInfoFile object and query the 'metadata' attribute " + "instead. 
See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].metadata diff --git a/py3/lib/python3.6/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/py3/lib/python3.6/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz new file mode 100644 index 0000000..124f3e1 Binary files /dev/null and b/py3/lib/python3.6/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz differ diff --git a/py3/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py b/py3/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py new file mode 100644 index 0000000..78f0d1a --- /dev/null +++ b/py3/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py @@ -0,0 +1,53 @@ +import logging +import os +import tempfile +import shutil +import json +from subprocess import check_call +from tarfile import TarFile + +from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME + + +def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None): + """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar* + + filename is the timezone tarball from ``ftp.iana.org/tz``. + + """ + tmpdir = tempfile.mkdtemp() + zonedir = os.path.join(tmpdir, "zoneinfo") + moduledir = os.path.dirname(__file__) + try: + with TarFile.open(filename) as tf: + for name in zonegroups: + tf.extract(name, tmpdir) + filepaths = [os.path.join(tmpdir, n) for n in zonegroups] + try: + check_call(["zic", "-d", zonedir] + filepaths) + except OSError as e: + _print_on_nosuchfile(e) + raise + # write metadata file + with open(os.path.join(zonedir, METADATA_FN), 'w') as f: + json.dump(metadata, f, indent=4, sort_keys=True) + target = os.path.join(moduledir, ZONEFILENAME) + with TarFile.open(target, "w:%s" % format) as tf: + for entry in os.listdir(zonedir): + entrypath = os.path.join(zonedir, entry) + tf.add(entrypath, entry) + finally: + shutil.rmtree(tmpdir) + + +def _print_on_nosuchfile(e): + """Print helpful troubleshooting message + + e is an exception raised by subprocess.check_call() + + """ + if e.errno == 2: + logging.error( + "Could not find zic. 
Perhaps you need to install " + "libc-bin or some other package that provides it, " + "or it's not in your PATH?") diff --git a/py3/lib/python3.6/site-packages/easy-install.pth b/py3/lib/python3.6/site-packages/easy-install.pth new file mode 100644 index 0000000..b7b07c6 --- /dev/null +++ b/py3/lib/python3.6/site-packages/easy-install.pth @@ -0,0 +1,4 @@ +./pybtc-2.0.9-py3.6.egg +./secp256k1-0.13.2-py3.6-linux-x86_64.egg +./cffi-1.12.3-py3.6-linux-x86_64.egg +./pycparser-2.19-py3.6.egg diff --git a/py3/lib/python3.6/site-packages/easy_install.py b/py3/lib/python3.6/site-packages/easy_install.py new file mode 100644 index 0000000..d87e984 --- /dev/null +++ b/py3/lib/python3.6/site-packages/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from setuptools.command.easy_install import main + main() diff --git a/py3/lib/python3.6/site-packages/editor.py b/py3/lib/python3.6/site-packages/editor.py new file mode 100755 index 0000000..6fc73f1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/editor.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python +"""Tools for invoking editors programmatically.""" + +from __future__ import print_function + +import sys +import locale +import os.path +import subprocess +import tempfile +from distutils.spawn import find_executable + + +__all__ = [ + 'edit', + 'get_editor', + 'EditorError', +] + +__version__ = '1.0.4' + + +class EditorError(RuntimeError): + pass + + +def get_default_editors(): + # TODO: Make platform-specific + return [ + 'editor', + 'vim', + 'emacs', + 'nano', + ] + + +def get_editor_args(editor): + if editor in ['vim', 'gvim', 'vim.basic', 'vim.tiny']: + return ['-f', '-o'] + + elif editor == 'emacs': + return ['-nw'] + + elif editor == 'gedit': + return ['-w', '--new-window'] + + elif editor == 'nano': + return ['-R'] + + else: + return [] + + +def get_editor(): + # Get the editor from the environment. Prefer VISUAL to EDITOR + editor = os.environ.get('VISUAL') or os.environ.get('EDITOR') + if editor: + return editor + + # None found in the environment. Fallback to platform-specific defaults. + for ed in get_default_editors(): + path = find_executable(ed) + if path is not None: + return path + + raise EditorError("Unable to find a viable editor on this system." + "Please consider setting your $EDITOR variable") + + +def get_tty_filename(): + if sys.platform == 'win32': + return 'CON:' + return '/dev/tty' + + +def edit(filename=None, contents=None, use_tty=None, suffix=''): + editor = get_editor() + args = [editor] + get_editor_args(os.path.basename(os.path.realpath(editor))) + + if use_tty is None: + use_tty = sys.stdin.isatty() and not sys.stdout.isatty() + + if filename is None: + tmp = tempfile.NamedTemporaryFile(suffix=suffix) + filename = tmp.name + + if contents is not None: + # For python3 only. 
If str is passed instead of bytes, encode default + if hasattr(contents, 'encode'): + contents = contents.encode() + + with open(filename, mode='wb') as f: + f.write(contents) + + args += [filename] + + stdout = None + if use_tty: + stdout = open(get_tty_filename(), 'wb') + + proc = subprocess.Popen(args, close_fds=True, stdout=stdout) + proc.communicate() + + with open(filename, mode='rb') as f: + return f.read() + + +def _get_editor(ns): + print(get_editor()) + + +def _edit(ns): + contents = ns.contents + if contents is not None: + contents = contents.encode(locale.getpreferredencoding()) + print(edit(filename=ns.path, contents=contents)) + + +if __name__ == '__main__': + import argparse + ap = argparse.ArgumentParser() + sp = ap.add_subparsers() + + cmd = sp.add_parser('get-editor') + cmd.set_defaults(cmd=_get_editor) + + cmd = sp.add_parser('edit') + cmd.set_defaults(cmd=_edit) + cmd.add_argument('path', type=str, nargs='?') + cmd.add_argument('--contents', type=str) + + ns = ap.parse_args() + ns.cmd(ns) diff --git a/py3/lib/python3.6/site-packages/flask/__init__.py b/py3/lib/python3.6/site-packages/flask/__init__.py new file mode 100644 index 0000000..687475b --- /dev/null +++ b/py3/lib/python3.6/site-packages/flask/__init__.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +""" + flask + ~~~~~ + + A microframework based on Werkzeug. It's extensively documented + and follows best practice patterns. + + :copyright: 2010 Pallets + :license: BSD-3-Clause +""" +# utilities we import from Werkzeug and Jinja2 that are unused +# in the module but are exported as public interface. +from jinja2 import escape +from jinja2 import Markup +from werkzeug.exceptions import abort +from werkzeug.utils import redirect + +from . import json +from ._compat import json_available +from .app import Flask +from .app import Request +from .app import Response +from .blueprints import Blueprint +from .config import Config +from .ctx import after_this_request +from .ctx import copy_current_request_context +from .ctx import has_app_context +from .ctx import has_request_context +from .globals import _app_ctx_stack +from .globals import _request_ctx_stack +from .globals import current_app +from .globals import g +from .globals import request +from .globals import session +from .helpers import flash +from .helpers import get_flashed_messages +from .helpers import get_template_attribute +from .helpers import make_response +from .helpers import safe_join +from .helpers import send_file +from .helpers import send_from_directory +from .helpers import stream_with_context +from .helpers import url_for +from .json import jsonify +from .signals import appcontext_popped +from .signals import appcontext_pushed +from .signals import appcontext_tearing_down +from .signals import before_render_template +from .signals import got_request_exception +from .signals import message_flashed +from .signals import request_finished +from .signals import request_started +from .signals import request_tearing_down +from .signals import signals_available +from .signals import template_rendered +from .templating import render_template +from .templating import render_template_string + +__version__ = "1.1.1" diff --git a/py3/lib/python3.6/site-packages/flask/__main__.py b/py3/lib/python3.6/site-packages/flask/__main__.py new file mode 100644 index 0000000..f61dbc0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/flask/__main__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +""" + flask.__main__ + ~~~~~~~~~~~~~~ + + Alias for flask.run for the command 
line. + + :copyright: 2010 Pallets + :license: BSD-3-Clause +""" + +if __name__ == "__main__": + from .cli import main + + main(as_module=True) diff --git a/py3/lib/python3.6/site-packages/flask/_compat.py b/py3/lib/python3.6/site-packages/flask/_compat.py new file mode 100644 index 0000000..76c442c --- /dev/null +++ b/py3/lib/python3.6/site-packages/flask/_compat.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +""" + flask._compat + ~~~~~~~~~~~~~ + + Some py2/py3 compatibility support based on a stripped down + version of six so we don't have to depend on a specific version + of it. + + :copyright: 2010 Pallets + :license: BSD-3-Clause +""" +import sys + +PY2 = sys.version_info[0] == 2 +_identity = lambda x: x + +try: # Python 2 + text_type = unicode + string_types = (str, unicode) + integer_types = (int, long) +except NameError: # Python 3 + text_type = str + string_types = (str,) + integer_types = (int,) + +if not PY2: + iterkeys = lambda d: iter(d.keys()) + itervalues = lambda d: iter(d.values()) + iteritems = lambda d: iter(d.items()) + + from inspect import getfullargspec as getargspec + from io import StringIO + import collections.abc as collections_abc + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + implements_to_string = _identity + +else: + iterkeys = lambda d: d.iterkeys() + itervalues = lambda d: d.itervalues() + iteritems = lambda d: d.iteritems() + + from inspect import getargspec + from cStringIO import StringIO + import collections as collections_abc + + exec("def reraise(tp, value, tb=None):\n raise tp, value, tb") + + def implements_to_string(cls): + cls.__unicode__ = cls.__str__ + cls.__str__ = lambda x: x.__unicode__().encode("utf-8") + return cls + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a + # dummy metaclass for one level of class instantiation that replaces + # itself with the actual metaclass. + class metaclass(type): + def __new__(metacls, name, this_bases, d): + return meta(name, bases, d) + + return type.__new__(metaclass, "temporary_class", (), {}) + + +# Certain versions of pypy have a bug where clearing the exception stack +# breaks the __exit__ function in a very peculiar way. The second level of +# exception blocks is necessary because pypy seems to forget to check if an +# exception happened until the next bytecode instruction? +# +# Relevant PyPy bugfix commit: +# https://bitbucket.org/pypy/pypy/commits/77ecf91c635a287e88e60d8ddb0f4e9df4003301 +# According to ronan on #pypy IRC, it is released in PyPy2 2.3 and later +# versions. +# +# Ubuntu 14.04 has PyPy 2.2.1, which does exhibit this bug. +BROKEN_PYPY_CTXMGR_EXIT = False +if hasattr(sys, "pypy_version_info"): + + class _Mgr(object): + def __enter__(self): + return self + + def __exit__(self, *args): + if hasattr(sys, "exc_clear"): + # Python 3 (PyPy3) doesn't have exc_clear + sys.exc_clear() + + try: + try: + with _Mgr(): + raise AssertionError() + except: # noqa: B001 + # We intentionally use a bare except here. See the comment above + # regarding a pypy bug as to why. 
+ raise + except TypeError: + BROKEN_PYPY_CTXMGR_EXIT = True + except AssertionError: + pass + + +try: + from os import fspath +except ImportError: + # Backwards compatibility as proposed in PEP 0519: + # https://www.python.org/dev/peps/pep-0519/#backwards-compatibility + def fspath(path): + return path.__fspath__() if hasattr(path, "__fspath__") else path + + +class _DeprecatedBool(object): + def __init__(self, name, version, value): + self.message = "'{}' is deprecated and will be removed in version {}.".format( + name, version + ) + self.value = value + + def _warn(self): + import warnings + + warnings.warn(self.message, DeprecationWarning, stacklevel=2) + + def __eq__(self, other): + self._warn() + return other == self.value + + def __ne__(self, other): + self._warn() + return other != self.value + + def __bool__(self): + self._warn() + return self.value + + __nonzero__ = __bool__ + + +json_available = _DeprecatedBool("flask.json_available", "2.0.0", True) diff --git a/py3/lib/python3.6/site-packages/flask/app.py b/py3/lib/python3.6/site-packages/flask/app.py new file mode 100644 index 0000000..e596fe5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/flask/app.py @@ -0,0 +1,2466 @@ +# -*- coding: utf-8 -*- +""" + flask.app + ~~~~~~~~~ + + This module implements the central WSGI application object. + + :copyright: 2010 Pallets + :license: BSD-3-Clause +""" +import os +import sys +import warnings +from datetime import timedelta +from functools import update_wrapper +from itertools import chain +from threading import Lock + +from werkzeug.datastructures import Headers +from werkzeug.datastructures import ImmutableDict +from werkzeug.exceptions import BadRequest +from werkzeug.exceptions import BadRequestKeyError +from werkzeug.exceptions import default_exceptions +from werkzeug.exceptions import HTTPException +from werkzeug.exceptions import InternalServerError +from werkzeug.exceptions import MethodNotAllowed +from werkzeug.routing import BuildError +from werkzeug.routing import Map +from werkzeug.routing import RequestRedirect +from werkzeug.routing import RoutingException +from werkzeug.routing import Rule +from werkzeug.wrappers import BaseResponse + +from . import cli +from . 
import json +from ._compat import integer_types +from ._compat import reraise +from ._compat import string_types +from ._compat import text_type +from .config import Config +from .config import ConfigAttribute +from .ctx import _AppCtxGlobals +from .ctx import AppContext +from .ctx import RequestContext +from .globals import _request_ctx_stack +from .globals import g +from .globals import request +from .globals import session +from .helpers import _endpoint_from_view_func +from .helpers import _PackageBoundObject +from .helpers import find_package +from .helpers import get_debug_flag +from .helpers import get_env +from .helpers import get_flashed_messages +from .helpers import get_load_dotenv +from .helpers import locked_cached_property +from .helpers import url_for +from .json import jsonify +from .logging import create_logger +from .sessions import SecureCookieSessionInterface +from .signals import appcontext_tearing_down +from .signals import got_request_exception +from .signals import request_finished +from .signals import request_started +from .signals import request_tearing_down +from .templating import _default_template_ctx_processor +from .templating import DispatchingJinjaLoader +from .templating import Environment +from .wrappers import Request +from .wrappers import Response + +# a singleton sentinel value for parameter defaults +_sentinel = object() + + +def _make_timedelta(value): + if not isinstance(value, timedelta): + return timedelta(seconds=value) + return value + + +def setupmethod(f): + """Wraps a method so that it performs a check in debug mode if the + first request was already handled. + """ + + def wrapper_func(self, *args, **kwargs): + if self.debug and self._got_first_request: + raise AssertionError( + "A setup function was called after the " + "first request was handled. This usually indicates a bug " + "in the application where a module was not imported " + "and decorators or other functionality was called too late.\n" + "To fix this make sure to import all your view modules, " + "database models and everything related at a central place " + "before the application starts serving requests." + ) + return f(self, *args, **kwargs) + + return update_wrapper(wrapper_func, f) + + +class Flask(_PackageBoundObject): + """The flask object implements a WSGI application and acts as the central + object. It is passed the name of the module or package of the + application. Once it is created it will act as a central registry for + the view functions, the URL rules, template configuration and much more. + + The name of the package is used to resolve resources from inside the + package or the folder the module is contained in depending on if the + package parameter resolves to an actual python package (a folder with + an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). + + For more information about resource loading, see :func:`open_resource`. + + Usually you create a :class:`Flask` instance in your main module or + in the :file:`__init__.py` file of your package like this:: + + from flask import Flask + app = Flask(__name__) + + .. admonition:: About the First Parameter + + The idea of the first parameter is to give Flask an idea of what + belongs to your application. This name is used to find resources + on the filesystem, can be used by extensions to improve debugging + information and a lot more. + + So it's important what you provide there. If you are using a single + module, `__name__` is always the correct value. 
If you however are + using a package, it's usually recommended to hardcode the name of + your package there. + + For example if your application is defined in :file:`yourapplication/app.py` + you should create it with one of the two versions below:: + + app = Flask('yourapplication') + app = Flask(__name__.split('.')[0]) + + Why is that? The application will work even with `__name__`, thanks + to how resources are looked up. However it will make debugging more + painful. Certain extensions can make assumptions based on the + import name of your application. For example the Flask-SQLAlchemy + extension will look for the code in your application that triggered + an SQL query in debug mode. If the import name is not properly set + up, that debugging information is lost. (For example it would only + pick up SQL queries in `yourapplication.app` and not + `yourapplication.views.frontend`) + + .. versionadded:: 0.7 + The `static_url_path`, `static_folder`, and `template_folder` + parameters were added. + + .. versionadded:: 0.8 + The `instance_path` and `instance_relative_config` parameters were + added. + + .. versionadded:: 0.11 + The `root_path` parameter was added. + + .. versionadded:: 1.0 + The ``host_matching`` and ``static_host`` parameters were added. + + .. versionadded:: 1.0 + The ``subdomain_matching`` parameter was added. Subdomain + matching needs to be enabled manually now. Setting + :data:`SERVER_NAME` does not implicitly enable it. + + :param import_name: the name of the application package + :param static_url_path: can be used to specify a different path for the + static files on the web. Defaults to the name + of the `static_folder` folder. + :param static_folder: the folder with static files that should be served + at `static_url_path`. Defaults to the ``'static'`` + folder in the root path of the application. + :param static_host: the host to use when adding the static route. + Defaults to None. Required when using ``host_matching=True`` + with a ``static_folder`` configured. + :param host_matching: set ``url_map.host_matching`` attribute. + Defaults to False. + :param subdomain_matching: consider the subdomain relative to + :data:`SERVER_NAME` when matching routes. Defaults to False. + :param template_folder: the folder that contains the templates that should + be used by the application. Defaults to + ``'templates'`` folder in the root path of the + application. + :param instance_path: An alternative instance path for the application. + By default the folder ``'instance'`` next to the + package or module is assumed to be the instance + path. + :param instance_relative_config: if set to ``True`` relative filenames + for loading the config are assumed to + be relative to the instance path instead + of the application root. + :param root_path: Flask by default will automatically calculate the path + to the root of the application. In certain situations + this cannot be achieved (for instance if the package + is a Python 3 namespace package) and needs to be + manually defined. + """ + + #: The class that is used for request objects. See :class:`~flask.Request` + #: for more information. + request_class = Request + + #: The class that is used for response objects. See + #: :class:`~flask.Response` for more information. + response_class = Response + + #: The class that is used for the Jinja environment. + #: + #: .. versionadded:: 0.11 + jinja_environment = Environment + + #: The class that is used for the :data:`~flask.g` instance. 
+ #: + #: Example use cases for a custom class: + #: + #: 1. Store arbitrary attributes on flask.g. + #: 2. Add a property for lazy per-request database connectors. + #: 3. Return None instead of AttributeError on unexpected attributes. + #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. + #: + #: In Flask 0.9 this property was called `request_globals_class` but it + #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the + #: flask.g object is now application context scoped. + #: + #: .. versionadded:: 0.10 + app_ctx_globals_class = _AppCtxGlobals + + #: The class that is used for the ``config`` attribute of this app. + #: Defaults to :class:`~flask.Config`. + #: + #: Example use cases for a custom class: + #: + #: 1. Default values for certain config options. + #: 2. Access to config values through attributes in addition to keys. + #: + #: .. versionadded:: 0.11 + config_class = Config + + #: The testing flag. Set this to ``True`` to enable the test mode of + #: Flask extensions (and in the future probably also Flask itself). + #: For example this might activate test helpers that have an + #: additional runtime cost which should not be enabled by default. + #: + #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the + #: default it's implicitly enabled. + #: + #: This attribute can also be configured from the config with the + #: ``TESTING`` configuration key. Defaults to ``False``. + testing = ConfigAttribute("TESTING") + + #: If a secret key is set, cryptographic components can use this to + #: sign cookies and other things. Set this to a complex random value + #: when you want to use the secure cookie for instance. + #: + #: This attribute can also be configured from the config with the + #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. + secret_key = ConfigAttribute("SECRET_KEY") + + #: The secure cookie uses this for the name of the session cookie. + #: + #: This attribute can also be configured from the config with the + #: ``SESSION_COOKIE_NAME`` configuration key. Defaults to ``'session'`` + session_cookie_name = ConfigAttribute("SESSION_COOKIE_NAME") + + #: A :class:`~datetime.timedelta` which is used to set the expiration + #: date of a permanent session. The default is 31 days which makes a + #: permanent session survive for roughly one month. + #: + #: This attribute can also be configured from the config with the + #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to + #: ``timedelta(days=31)`` + permanent_session_lifetime = ConfigAttribute( + "PERMANENT_SESSION_LIFETIME", get_converter=_make_timedelta + ) + + #: A :class:`~datetime.timedelta` which is used as default cache_timeout + #: for the :func:`send_file` functions. The default is 12 hours. + #: + #: This attribute can also be configured from the config with the + #: ``SEND_FILE_MAX_AGE_DEFAULT`` configuration key. This configuration + #: variable can also be set with an integer value used as seconds. + #: Defaults to ``timedelta(hours=12)`` + send_file_max_age_default = ConfigAttribute( + "SEND_FILE_MAX_AGE_DEFAULT", get_converter=_make_timedelta + ) + + #: Enable this if you want to use the X-Sendfile feature. Keep in + #: mind that the server has to support this. This only affects files + #: sent with the :func:`send_file` method. + #: + #: .. versionadded:: 0.2 + #: + #: This attribute can also be configured from the config with the + #: ``USE_X_SENDFILE`` configuration key. Defaults to ``False``. 
+ use_x_sendfile = ConfigAttribute("USE_X_SENDFILE") + + #: The JSON encoder class to use. Defaults to :class:`~flask.json.JSONEncoder`. + #: + #: .. versionadded:: 0.10 + json_encoder = json.JSONEncoder + + #: The JSON decoder class to use. Defaults to :class:`~flask.json.JSONDecoder`. + #: + #: .. versionadded:: 0.10 + json_decoder = json.JSONDecoder + + #: Options that are passed to the Jinja environment in + #: :meth:`create_jinja_environment`. Changing these options after + #: the environment is created (accessing :attr:`jinja_env`) will + #: have no effect. + #: + #: .. versionchanged:: 1.1.0 + #: This is a ``dict`` instead of an ``ImmutableDict`` to allow + #: easier configuration. + #: + jinja_options = {"extensions": ["jinja2.ext.autoescape", "jinja2.ext.with_"]} + + #: Default configuration parameters. + default_config = ImmutableDict( + { + "ENV": None, + "DEBUG": None, + "TESTING": False, + "PROPAGATE_EXCEPTIONS": None, + "PRESERVE_CONTEXT_ON_EXCEPTION": None, + "SECRET_KEY": None, + "PERMANENT_SESSION_LIFETIME": timedelta(days=31), + "USE_X_SENDFILE": False, + "SERVER_NAME": None, + "APPLICATION_ROOT": "/", + "SESSION_COOKIE_NAME": "session", + "SESSION_COOKIE_DOMAIN": None, + "SESSION_COOKIE_PATH": None, + "SESSION_COOKIE_HTTPONLY": True, + "SESSION_COOKIE_SECURE": False, + "SESSION_COOKIE_SAMESITE": None, + "SESSION_REFRESH_EACH_REQUEST": True, + "MAX_CONTENT_LENGTH": None, + "SEND_FILE_MAX_AGE_DEFAULT": timedelta(hours=12), + "TRAP_BAD_REQUEST_ERRORS": None, + "TRAP_HTTP_EXCEPTIONS": False, + "EXPLAIN_TEMPLATE_LOADING": False, + "PREFERRED_URL_SCHEME": "http", + "JSON_AS_ASCII": True, + "JSON_SORT_KEYS": True, + "JSONIFY_PRETTYPRINT_REGULAR": False, + "JSONIFY_MIMETYPE": "application/json", + "TEMPLATES_AUTO_RELOAD": None, + "MAX_COOKIE_SIZE": 4093, + } + ) + + #: The rule object to use for URL rules created. This is used by + #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. + #: + #: .. versionadded:: 0.7 + url_rule_class = Rule + + #: The map object to use for storing the URL rules and routing + #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`. + #: + #: .. versionadded:: 1.1.0 + url_map_class = Map + + #: the test client that is used with when `test_client` is used. + #: + #: .. versionadded:: 0.7 + test_client_class = None + + #: The :class:`~click.testing.CliRunner` subclass, by default + #: :class:`~flask.testing.FlaskCliRunner` that is used by + #: :meth:`test_cli_runner`. Its ``__init__`` method should take a + #: Flask app object as the first argument. + #: + #: .. versionadded:: 1.0 + test_cli_runner_class = None + + #: the session interface to use. By default an instance of + #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. + #: + #: .. versionadded:: 0.8 + session_interface = SecureCookieSessionInterface() + + # TODO remove the next three attrs when Sphinx :inherited-members: works + # https://github.com/sphinx-doc/sphinx/issues/741 + + #: The name of the package or module that this app belongs to. Do not + #: change this once it is set by the constructor. + import_name = None + + #: Location of the template files to be added to the template lookup. + #: ``None`` if templates should not be added. + template_folder = None + + #: Absolute path to the package on the filesystem. Used to look up + #: resources contained in the package. 
+ root_path = None + + def __init__( + self, + import_name, + static_url_path=None, + static_folder="static", + static_host=None, + host_matching=False, + subdomain_matching=False, + template_folder="templates", + instance_path=None, + instance_relative_config=False, + root_path=None, + ): + _PackageBoundObject.__init__( + self, import_name, template_folder=template_folder, root_path=root_path + ) + + self.static_url_path = static_url_path + self.static_folder = static_folder + + if instance_path is None: + instance_path = self.auto_find_instance_path() + elif not os.path.isabs(instance_path): + raise ValueError( + "If an instance path is provided it must be absolute." + " A relative path was given instead." + ) + + #: Holds the path to the instance folder. + #: + #: .. versionadded:: 0.8 + self.instance_path = instance_path + + #: The configuration dictionary as :class:`Config`. This behaves + #: exactly like a regular dictionary but supports additional methods + #: to load a config from files. + self.config = self.make_config(instance_relative_config) + + #: A dictionary of all view functions registered. The keys will + #: be function names which are also used to generate URLs and + #: the values are the function objects themselves. + #: To register a view function, use the :meth:`route` decorator. + self.view_functions = {} + + #: A dictionary of all registered error handlers. The key is ``None`` + #: for error handlers active on the application, otherwise the key is + #: the name of the blueprint. Each key points to another dictionary + #: where the key is the status code of the http exception. The + #: special key ``None`` points to a list of tuples where the first item + #: is the class for the instance check and the second the error handler + #: function. + #: + #: To register an error handler, use the :meth:`errorhandler` + #: decorator. + self.error_handler_spec = {} + + #: A list of functions that are called when :meth:`url_for` raises a + #: :exc:`~werkzeug.routing.BuildError`. Each function registered here + #: is called with `error`, `endpoint` and `values`. If a function + #: returns ``None`` or raises a :exc:`BuildError` the next function is + #: tried. + #: + #: .. versionadded:: 0.9 + self.url_build_error_handlers = [] + + #: A dictionary with lists of functions that will be called at the + #: beginning of each request. The key of the dictionary is the name of + #: the blueprint this function is active for, or ``None`` for all + #: requests. To register a function, use the :meth:`before_request` + #: decorator. + self.before_request_funcs = {} + + #: A list of functions that will be called at the beginning of the + #: first request to this instance. To register a function, use the + #: :meth:`before_first_request` decorator. + #: + #: .. versionadded:: 0.8 + self.before_first_request_funcs = [] + + #: A dictionary with lists of functions that should be called after + #: each request. The key of the dictionary is the name of the blueprint + #: this function is active for, ``None`` for all requests. This can for + #: example be used to close database connections. To register a function + #: here, use the :meth:`after_request` decorator. + self.after_request_funcs = {} + + #: A dictionary with lists of functions that are called after + #: each request, even if an exception has occurred. The key of the + #: dictionary is the name of the blueprint this function is active for, + #: ``None`` for all requests. 
These functions are not allowed to modify + #: the request, and their return values are ignored. If an exception + #: occurred while processing the request, it gets passed to each + #: teardown_request function. To register a function here, use the + #: :meth:`teardown_request` decorator. + #: + #: .. versionadded:: 0.7 + self.teardown_request_funcs = {} + + #: A list of functions that are called when the application context + #: is destroyed. Since the application context is also torn down + #: if the request ends this is the place to store code that disconnects + #: from databases. + #: + #: .. versionadded:: 0.9 + self.teardown_appcontext_funcs = [] + + #: A dictionary with lists of functions that are called before the + #: :attr:`before_request_funcs` functions. The key of the dictionary is + #: the name of the blueprint this function is active for, or ``None`` + #: for all requests. To register a function, use + #: :meth:`url_value_preprocessor`. + #: + #: .. versionadded:: 0.7 + self.url_value_preprocessors = {} + + #: A dictionary with lists of functions that can be used as URL value + #: preprocessors. The key ``None`` here is used for application wide + #: callbacks, otherwise the key is the name of the blueprint. + #: Each of these functions has the chance to modify the dictionary + #: of URL values before they are used as the keyword arguments of the + #: view function. For each function registered this one should also + #: provide a :meth:`url_defaults` function that adds the parameters + #: automatically again that were removed that way. + #: + #: .. versionadded:: 0.7 + self.url_default_functions = {} + + #: A dictionary with list of functions that are called without argument + #: to populate the template context. The key of the dictionary is the + #: name of the blueprint this function is active for, ``None`` for all + #: requests. Each returns a dictionary that the template context is + #: updated with. To register a function here, use the + #: :meth:`context_processor` decorator. + self.template_context_processors = {None: [_default_template_ctx_processor]} + + #: A list of shell context processor functions that should be run + #: when a shell context is created. + #: + #: .. versionadded:: 0.11 + self.shell_context_processors = [] + + #: all the attached blueprints in a dictionary by name. Blueprints + #: can be attached multiple times so this dictionary does not tell + #: you how often they got attached. + #: + #: .. versionadded:: 0.7 + self.blueprints = {} + self._blueprint_order = [] + + #: a place where extensions can store application specific state. For + #: example this is where an extension could store database engines and + #: similar things. For backwards compatibility extensions should register + #: themselves like this:: + #: + #: if not hasattr(app, 'extensions'): + #: app.extensions = {} + #: app.extensions['extensionname'] = SomeObject() + #: + #: The key must match the name of the extension module. For example in + #: case of a "Flask-Foo" extension in `flask_foo`, the key would be + #: ``'foo'``. + #: + #: .. versionadded:: 0.7 + self.extensions = {} + + #: The :class:`~werkzeug.routing.Map` for this instance. You can use + #: this to change the routing converters after the class was created + #: but before any routes are connected. 
Example:: + #: + #: from werkzeug.routing import BaseConverter + #: + #: class ListConverter(BaseConverter): + #: def to_python(self, value): + #: return value.split(',') + #: def to_url(self, values): + #: return ','.join(super(ListConverter, self).to_url(value) + #: for value in values) + #: + #: app = Flask(__name__) + #: app.url_map.converters['list'] = ListConverter + self.url_map = self.url_map_class() + + self.url_map.host_matching = host_matching + self.subdomain_matching = subdomain_matching + + # tracks internally if the application already handled at least one + # request. + self._got_first_request = False + self._before_request_lock = Lock() + + # Add a static route using the provided static_url_path, static_host, + # and static_folder if there is a configured static_folder. + # Note we do this without checking if static_folder exists. + # For one, it might be created while the server is running (e.g. during + # development). Also, Google App Engine stores static files somewhere + if self.has_static_folder: + assert ( + bool(static_host) == host_matching + ), "Invalid static_host/host_matching combination" + self.add_url_rule( + self.static_url_path + "/", + endpoint="static", + host=static_host, + view_func=self.send_static_file, + ) + + # Set the name of the Click group in case someone wants to add + # the app's commands to another CLI tool. + self.cli.name = self.name + + @locked_cached_property + def name(self): + """The name of the application. This is usually the import name + with the difference that it's guessed from the run file if the + import name is main. This name is used as a display name when + Flask needs the name of the application. It can be set and overridden + to change the value. + + .. versionadded:: 0.8 + """ + if self.import_name == "__main__": + fn = getattr(sys.modules["__main__"], "__file__", None) + if fn is None: + return "__main__" + return os.path.splitext(os.path.basename(fn))[0] + return self.import_name + + @property + def propagate_exceptions(self): + """Returns the value of the ``PROPAGATE_EXCEPTIONS`` configuration + value in case it's set, otherwise a sensible default is returned. + + .. versionadded:: 0.7 + """ + rv = self.config["PROPAGATE_EXCEPTIONS"] + if rv is not None: + return rv + return self.testing or self.debug + + @property + def preserve_context_on_exception(self): + """Returns the value of the ``PRESERVE_CONTEXT_ON_EXCEPTION`` + configuration value in case it's set, otherwise a sensible default + is returned. + + .. versionadded:: 0.7 + """ + rv = self.config["PRESERVE_CONTEXT_ON_EXCEPTION"] + if rv is not None: + return rv + return self.debug + + @locked_cached_property + def logger(self): + """A standard Python :class:`~logging.Logger` for the app, with + the same name as :attr:`name`. + + In debug mode, the logger's :attr:`~logging.Logger.level` will + be set to :data:`~logging.DEBUG`. + + If there are no handlers configured, a default handler will be + added. See :doc:`/logging` for more information. + + .. versionchanged:: 1.1.0 + The logger takes the same name as :attr:`name` rather than + hard-coding ``"flask.app"``. + + .. versionchanged:: 1.0.0 + Behavior was simplified. The logger is always named + ``"flask.app"``. The level is only set during configuration, + it doesn't check ``app.debug`` each time. Only one format is + used, not different ones depending on ``app.debug``. No + handlers are removed, and a handler is only added if no + handlers are already configured. + + .. 
versionadded:: 0.3 + """ + return create_logger(self) + + @locked_cached_property + def jinja_env(self): + """The Jinja environment used to load templates. + + The environment is created the first time this property is + accessed. Changing :attr:`jinja_options` after that will have no + effect. + """ + return self.create_jinja_environment() + + @property + def got_first_request(self): + """This attribute is set to ``True`` if the application started + handling the first request. + + .. versionadded:: 0.8 + """ + return self._got_first_request + + def make_config(self, instance_relative=False): + """Used to create the config attribute by the Flask constructor. + The `instance_relative` parameter is passed in from the constructor + of Flask (there named `instance_relative_config`) and indicates if + the config should be relative to the instance path or the root path + of the application. + + .. versionadded:: 0.8 + """ + root_path = self.root_path + if instance_relative: + root_path = self.instance_path + defaults = dict(self.default_config) + defaults["ENV"] = get_env() + defaults["DEBUG"] = get_debug_flag() + return self.config_class(root_path, defaults) + + def auto_find_instance_path(self): + """Tries to locate the instance path if it was not provided to the + constructor of the application class. It will basically calculate + the path to a folder named ``instance`` next to your main file or + the package. + + .. versionadded:: 0.8 + """ + prefix, package_path = find_package(self.import_name) + if prefix is None: + return os.path.join(package_path, "instance") + return os.path.join(prefix, "var", self.name + "-instance") + + def open_instance_resource(self, resource, mode="rb"): + """Opens a resource from the application's instance folder + (:attr:`instance_path`). Otherwise works like + :meth:`open_resource`. Instance resources can also be opened for + writing. + + :param resource: the name of the resource. To access resources within + subfolders use forward slashes as separator. + :param mode: resource file opening mode, default is 'rb'. + """ + return open(os.path.join(self.instance_path, resource), mode) + + @property + def templates_auto_reload(self): + """Reload templates when they are changed. Used by + :meth:`create_jinja_environment`. + + This attribute can be configured with :data:`TEMPLATES_AUTO_RELOAD`. If + not set, it will be enabled in debug mode. + + .. versionadded:: 1.0 + This property was added but the underlying config and behavior + already existed. + """ + rv = self.config["TEMPLATES_AUTO_RELOAD"] + return rv if rv is not None else self.debug + + @templates_auto_reload.setter + def templates_auto_reload(self, value): + self.config["TEMPLATES_AUTO_RELOAD"] = value + + def create_jinja_environment(self): + """Create the Jinja environment based on :attr:`jinja_options` + and the various Jinja-related methods of the app. Changing + :attr:`jinja_options` after this will have no effect. Also adds + Flask-related globals and filters to the environment. + + .. versionchanged:: 0.11 + ``Environment.auto_reload`` set in accordance with + ``TEMPLATES_AUTO_RELOAD`` configuration option. + + .. 
versionadded:: 0.5 + """ + options = dict(self.jinja_options) + + if "autoescape" not in options: + options["autoescape"] = self.select_jinja_autoescape + + if "auto_reload" not in options: + options["auto_reload"] = self.templates_auto_reload + + rv = self.jinja_environment(self, **options) + rv.globals.update( + url_for=url_for, + get_flashed_messages=get_flashed_messages, + config=self.config, + # request, session and g are normally added with the + # context processor for efficiency reasons but for imported + # templates we also want the proxies in there. + request=request, + session=session, + g=g, + ) + rv.filters["tojson"] = json.tojson_filter + return rv + + def create_global_jinja_loader(self): + """Creates the loader for the Jinja2 environment. Can be used to + override just the loader and keeping the rest unchanged. It's + discouraged to override this function. Instead one should override + the :meth:`jinja_loader` function instead. + + The global loader dispatches between the loaders of the application + and the individual blueprints. + + .. versionadded:: 0.7 + """ + return DispatchingJinjaLoader(self) + + def select_jinja_autoescape(self, filename): + """Returns ``True`` if autoescaping should be active for the given + template name. If no template name is given, returns `True`. + + .. versionadded:: 0.5 + """ + if filename is None: + return True + return filename.endswith((".html", ".htm", ".xml", ".xhtml")) + + def update_template_context(self, context): + """Update the template context with some commonly used variables. + This injects request, session, config and g into the template + context as well as everything template context processors want + to inject. Note that the as of Flask 0.6, the original values + in the context will not be overridden if a context processor + decides to return a value with the same key. + + :param context: the context as a dictionary that is updated in place + to add extra variables. + """ + funcs = self.template_context_processors[None] + reqctx = _request_ctx_stack.top + if reqctx is not None: + bp = reqctx.request.blueprint + if bp is not None and bp in self.template_context_processors: + funcs = chain(funcs, self.template_context_processors[bp]) + orig_ctx = context.copy() + for func in funcs: + context.update(func()) + # make sure the original values win. This makes it possible to + # easier add new variables in context processors without breaking + # existing views. + context.update(orig_ctx) + + def make_shell_context(self): + """Returns the shell context for an interactive shell for this + application. This runs all the registered shell context + processors. + + .. versionadded:: 0.11 + """ + rv = {"app": self, "g": g} + for processor in self.shell_context_processors: + rv.update(processor()) + return rv + + #: What environment the app is running in. Flask and extensions may + #: enable behaviors based on the environment, such as enabling debug + #: mode. This maps to the :data:`ENV` config key. This is set by the + #: :envvar:`FLASK_ENV` environment variable and may not behave as + #: expected if set in code. + #: + #: **Do not enable development when deploying in production.** + #: + #: Default: ``'production'`` + env = ConfigAttribute("ENV") + + @property + def debug(self): + """Whether debug mode is enabled. When using ``flask run`` to start + the development server, an interactive debugger will be shown for + unhandled exceptions, and the server will be reloaded when code + changes. This maps to the :data:`DEBUG` config key. 
This is + enabled when :attr:`env` is ``'development'`` and is overridden + by the ``FLASK_DEBUG`` environment variable. It may not behave as + expected if set in code. + + **Do not enable debug mode when deploying in production.** + + Default: ``True`` if :attr:`env` is ``'development'``, or + ``False`` otherwise. + """ + return self.config["DEBUG"] + + @debug.setter + def debug(self, value): + self.config["DEBUG"] = value + self.jinja_env.auto_reload = self.templates_auto_reload + + def run(self, host=None, port=None, debug=None, load_dotenv=True, **options): + """Runs the application on a local development server. + + Do not use ``run()`` in a production setting. It is not intended to + meet security and performance requirements for a production server. + Instead, see :ref:`deployment` for WSGI server recommendations. + + If the :attr:`debug` flag is set the server will automatically reload + for code changes and show a debugger in case an exception happened. + + If you want to run the application in debug mode, but disable the + code execution on the interactive debugger, you can pass + ``use_evalex=False`` as parameter. This will keep the debugger's + traceback screen active, but disable code execution. + + It is not recommended to use this function for development with + automatic reloading as this is badly supported. Instead you should + be using the :command:`flask` command line script's ``run`` support. + + .. admonition:: Keep in Mind + + Flask will suppress any server error with a generic error page + unless it is in debug mode. As such to enable just the + interactive debugger without the code reloading, you have to + invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. + Setting ``use_debugger`` to ``True`` without being in debug mode + won't catch any exceptions because there won't be any to + catch. + + :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to + have the server available externally as well. Defaults to + ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable + if present. + :param port: the port of the webserver. Defaults to ``5000`` or the + port defined in the ``SERVER_NAME`` config variable if present. + :param debug: if given, enable or disable debug mode. See + :attr:`debug`. + :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` + files to set environment variables. Will also change the working + directory to the directory containing the first file found. + :param options: the options to be forwarded to the underlying Werkzeug + server. See :func:`werkzeug.serving.run_simple` for more + information. + + .. versionchanged:: 1.0 + If installed, python-dotenv will be used to load environment + variables from :file:`.env` and :file:`.flaskenv` files. + + If set, the :envvar:`FLASK_ENV` and :envvar:`FLASK_DEBUG` + environment variables will override :attr:`env` and + :attr:`debug`. + + Threaded mode is enabled by default. + + .. versionchanged:: 0.10 + The default port is now picked from the ``SERVER_NAME`` + variable. + """ + # Change this into a no-op if the server is invoked from the + # command line. Have a look at cli.py for more information. 
+ if os.environ.get("FLASK_RUN_FROM_CLI") == "true": + from .debughelpers import explain_ignored_app_run + + explain_ignored_app_run() + return + + if get_load_dotenv(load_dotenv): + cli.load_dotenv() + + # if set, let env vars override previous values + if "FLASK_ENV" in os.environ: + self.env = get_env() + self.debug = get_debug_flag() + elif "FLASK_DEBUG" in os.environ: + self.debug = get_debug_flag() + + # debug passed to method overrides all other sources + if debug is not None: + self.debug = bool(debug) + + _host = "127.0.0.1" + _port = 5000 + server_name = self.config.get("SERVER_NAME") + sn_host, sn_port = None, None + + if server_name: + sn_host, _, sn_port = server_name.partition(":") + + host = host or sn_host or _host + # pick the first value that's not None (0 is allowed) + port = int(next((p for p in (port, sn_port) if p is not None), _port)) + + options.setdefault("use_reloader", self.debug) + options.setdefault("use_debugger", self.debug) + options.setdefault("threaded", True) + + cli.show_server_banner(self.env, self.debug, self.name, False) + + from werkzeug.serving import run_simple + + try: + run_simple(host, port, self, **options) + finally: + # reset the first request information if the development server + # reset normally. This makes it possible to restart the server + # without reloader and that stuff from an interactive shell. + self._got_first_request = False + + def test_client(self, use_cookies=True, **kwargs): + """Creates a test client for this application. For information + about unit testing head over to :ref:`testing`. + + Note that if you are testing for assertions or exceptions in your + application code, you must set ``app.testing = True`` in order for the + exceptions to propagate to the test client. Otherwise, the exception + will be handled by the application (not visible to the test client) and + the only indication of an AssertionError or other exception will be a + 500 status code response to the test client. See the :attr:`testing` + attribute. For example:: + + app.testing = True + client = app.test_client() + + The test client can be used in a ``with`` block to defer the closing down + of the context until the end of the ``with`` block. This is useful if + you want to access the context locals for testing:: + + with app.test_client() as c: + rv = c.get('/?vodka=42') + assert request.args['vodka'] == '42' + + Additionally, you may pass optional keyword arguments that will then + be passed to the application's :attr:`test_client_class` constructor. + For example:: + + from flask.testing import FlaskClient + + class CustomClient(FlaskClient): + def __init__(self, *args, **kwargs): + self._authentication = kwargs.pop("authentication") + super(CustomClient,self).__init__( *args, **kwargs) + + app.test_client_class = CustomClient + client = app.test_client(authentication='Basic ....') + + See :class:`~flask.testing.FlaskClient` for more information. + + .. versionchanged:: 0.4 + added support for ``with`` block usage for the client. + + .. versionadded:: 0.7 + The `use_cookies` parameter was added as well as the ability + to override the client to be used by setting the + :attr:`test_client_class` attribute. + + .. versionchanged:: 0.11 + Added `**kwargs` to support passing additional keyword arguments to + the constructor of :attr:`test_client_class`. 
+ """ + cls = self.test_client_class + if cls is None: + from .testing import FlaskClient as cls + return cls(self, self.response_class, use_cookies=use_cookies, **kwargs) + + def test_cli_runner(self, **kwargs): + """Create a CLI runner for testing CLI commands. + See :ref:`testing-cli`. + + Returns an instance of :attr:`test_cli_runner_class`, by default + :class:`~flask.testing.FlaskCliRunner`. The Flask app object is + passed as the first argument. + + .. versionadded:: 1.0 + """ + cls = self.test_cli_runner_class + + if cls is None: + from .testing import FlaskCliRunner as cls + + return cls(self, **kwargs) + + def open_session(self, request): + """Creates or opens a new session. Default implementation stores all + session data in a signed cookie. This requires that the + :attr:`secret_key` is set. Instead of overriding this method + we recommend replacing the :class:`session_interface`. + + .. deprecated: 1.0 + Will be removed in 1.1. Use ``session_interface.open_session`` + instead. + + :param request: an instance of :attr:`request_class`. + """ + + warnings.warn( + DeprecationWarning( + '"open_session" is deprecated and will be removed in 1.1. Use' + ' "session_interface.open_session" instead.' + ) + ) + return self.session_interface.open_session(self, request) + + def save_session(self, session, response): + """Saves the session if it needs updates. For the default + implementation, check :meth:`open_session`. Instead of overriding this + method we recommend replacing the :class:`session_interface`. + + .. deprecated: 1.0 + Will be removed in 1.1. Use ``session_interface.save_session`` + instead. + + :param session: the session to be saved (a + :class:`~werkzeug.contrib.securecookie.SecureCookie` + object) + :param response: an instance of :attr:`response_class` + """ + + warnings.warn( + DeprecationWarning( + '"save_session" is deprecated and will be removed in 1.1. Use' + ' "session_interface.save_session" instead.' + ) + ) + return self.session_interface.save_session(self, session, response) + + def make_null_session(self): + """Creates a new instance of a missing session. Instead of overriding + this method we recommend replacing the :class:`session_interface`. + + .. deprecated: 1.0 + Will be removed in 1.1. Use ``session_interface.make_null_session`` + instead. + + .. versionadded:: 0.7 + """ + + warnings.warn( + DeprecationWarning( + '"make_null_session" is deprecated and will be removed in 1.1. Use' + ' "session_interface.make_null_session" instead.' + ) + ) + return self.session_interface.make_null_session(self) + + @setupmethod + def register_blueprint(self, blueprint, **options): + """Register a :class:`~flask.Blueprint` on the application. Keyword + arguments passed to this method will override the defaults set on the + blueprint. + + Calls the blueprint's :meth:`~flask.Blueprint.register` method after + recording the blueprint in the application's :attr:`blueprints`. + + :param blueprint: The blueprint to register. + :param url_prefix: Blueprint routes will be prefixed with this. + :param subdomain: Blueprint routes will match on this subdomain. + :param url_defaults: Blueprint routes will use these default values for + view arguments. + :param options: Additional keyword arguments are passed to + :class:`~flask.blueprints.BlueprintSetupState`. They can be + accessed in :meth:`~flask.Blueprint.record` callbacks. + + .. 
versionadded:: 0.7 + """ + first_registration = False + + if blueprint.name in self.blueprints: + assert self.blueprints[blueprint.name] is blueprint, ( + "A name collision occurred between blueprints %r and %r. Both" + ' share the same name "%s". Blueprints that are created on the' + " fly need unique names." + % (blueprint, self.blueprints[blueprint.name], blueprint.name) + ) + else: + self.blueprints[blueprint.name] = blueprint + self._blueprint_order.append(blueprint) + first_registration = True + + blueprint.register(self, options, first_registration) + + def iter_blueprints(self): + """Iterates over all blueprints by the order they were registered. + + .. versionadded:: 0.11 + """ + return iter(self._blueprint_order) + + @setupmethod + def add_url_rule( + self, + rule, + endpoint=None, + view_func=None, + provide_automatic_options=None, + **options + ): + """Connects a URL rule. Works exactly like the :meth:`route` + decorator. If a view_func is provided it will be registered with the + endpoint. + + Basically this example:: + + @app.route('/') + def index(): + pass + + Is equivalent to the following:: + + def index(): + pass + app.add_url_rule('/', 'index', index) + + If the view_func is not provided you will need to connect the endpoint + to a view function like so:: + + app.view_functions['index'] = index + + Internally :meth:`route` invokes :meth:`add_url_rule` so if you want + to customize the behavior via subclassing you only need to change + this method. + + For more information refer to :ref:`url-route-registrations`. + + .. versionchanged:: 0.2 + `view_func` parameter added. + + .. versionchanged:: 0.6 + ``OPTIONS`` is added automatically as method. + + :param rule: the URL rule as string + :param endpoint: the endpoint for the registered URL rule. Flask + itself assumes the name of the view function as + endpoint + :param view_func: the function to call when serving a request to the + provided endpoint + :param provide_automatic_options: controls whether the ``OPTIONS`` + method should be added automatically. This can also be controlled + by setting the ``view_func.provide_automatic_options = False`` + before adding the rule. + :param options: the options to be forwarded to the underlying + :class:`~werkzeug.routing.Rule` object. A change + to Werkzeug is handling of method options. methods + is a list of methods this rule should be limited + to (``GET``, ``POST`` etc.). By default a rule + just listens for ``GET`` (and implicitly ``HEAD``). + Starting with Flask 0.6, ``OPTIONS`` is implicitly + added and handled by the standard request handling. + """ + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) + options["endpoint"] = endpoint + methods = options.pop("methods", None) + + # if the methods are not given and the view_func object knows its + # methods we can use that instead. If neither exists, we go with + # a tuple of only ``GET`` as default. + if methods is None: + methods = getattr(view_func, "methods", None) or ("GET",) + if isinstance(methods, string_types): + raise TypeError( + "Allowed methods have to be iterables of strings, " + 'for example: @app.route(..., methods=["POST"])' + ) + methods = set(item.upper() for item in methods) + + # Methods that should always be added + required_methods = set(getattr(view_func, "required_methods", ())) + + # starting with Flask 0.8 the view_func object can disable and + # force-enable the automatic options handling. 
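        # (Resolution order for automatic OPTIONS handling: an explicit
        # ``provide_automatic_options`` argument wins, then an attribute of
        # the same name set on the view function, and otherwise OPTIONS is
        # added automatically unless the rule already lists it in ``methods``.)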
+ if provide_automatic_options is None: + provide_automatic_options = getattr( + view_func, "provide_automatic_options", None + ) + + if provide_automatic_options is None: + if "OPTIONS" not in methods: + provide_automatic_options = True + required_methods.add("OPTIONS") + else: + provide_automatic_options = False + + # Add the required methods now. + methods |= required_methods + + rule = self.url_rule_class(rule, methods=methods, **options) + rule.provide_automatic_options = provide_automatic_options + + self.url_map.add(rule) + if view_func is not None: + old_func = self.view_functions.get(endpoint) + if old_func is not None and old_func != view_func: + raise AssertionError( + "View function mapping is overwriting an " + "existing endpoint function: %s" % endpoint + ) + self.view_functions[endpoint] = view_func + + def route(self, rule, **options): + """A decorator that is used to register a view function for a + given URL rule. This does the same thing as :meth:`add_url_rule` + but is intended for decorator usage:: + + @app.route('/') + def index(): + return 'Hello World' + + For more information refer to :ref:`url-route-registrations`. + + :param rule: the URL rule as string + :param endpoint: the endpoint for the registered URL rule. Flask + itself assumes the name of the view function as + endpoint + :param options: the options to be forwarded to the underlying + :class:`~werkzeug.routing.Rule` object. A change + to Werkzeug is handling of method options. methods + is a list of methods this rule should be limited + to (``GET``, ``POST`` etc.). By default a rule + just listens for ``GET`` (and implicitly ``HEAD``). + Starting with Flask 0.6, ``OPTIONS`` is implicitly + added and handled by the standard request handling. + """ + + def decorator(f): + endpoint = options.pop("endpoint", None) + self.add_url_rule(rule, endpoint, f, **options) + return f + + return decorator + + @setupmethod + def endpoint(self, endpoint): + """A decorator to register a function as an endpoint. + Example:: + + @app.endpoint('example.endpoint') + def example(): + return "example" + + :param endpoint: the name of the endpoint + """ + + def decorator(f): + self.view_functions[endpoint] = f + return f + + return decorator + + @staticmethod + def _get_exc_class_and_code(exc_class_or_code): + """Get the exception class being handled. For HTTP status codes + or ``HTTPException`` subclasses, return both the exception and + status code. + + :param exc_class_or_code: Any exception class, or an HTTP status + code as an integer. + """ + if isinstance(exc_class_or_code, integer_types): + exc_class = default_exceptions[exc_class_or_code] + else: + exc_class = exc_class_or_code + + assert issubclass(exc_class, Exception) + + if issubclass(exc_class, HTTPException): + return exc_class, exc_class.code + else: + return exc_class, None + + @setupmethod + def errorhandler(self, code_or_exception): + """Register a function to handle errors by code or exception class. + + A decorator that is used to register a function given an + error code. Example:: + + @app.errorhandler(404) + def page_not_found(error): + return 'This page does not exist', 404 + + You can also register handlers for arbitrary exceptions:: + + @app.errorhandler(DatabaseError) + def special_exception_handler(error): + return 'Database connection failed', 500 + + .. versionadded:: 0.7 + Use :meth:`register_error_handler` instead of modifying + :attr:`error_handler_spec` directly, for application wide error + handlers. + + .. 
versionadded:: 0.7 + One can now additionally also register custom exception types + that do not necessarily have to be a subclass of the + :class:`~werkzeug.exceptions.HTTPException` class. + + :param code_or_exception: the code as integer for the handler, or + an arbitrary exception + """ + + def decorator(f): + self._register_error_handler(None, code_or_exception, f) + return f + + return decorator + + @setupmethod + def register_error_handler(self, code_or_exception, f): + """Alternative error attach function to the :meth:`errorhandler` + decorator that is more straightforward to use for non decorator + usage. + + .. versionadded:: 0.7 + """ + self._register_error_handler(None, code_or_exception, f) + + @setupmethod + def _register_error_handler(self, key, code_or_exception, f): + """ + :type key: None|str + :type code_or_exception: int|T<=Exception + :type f: callable + """ + if isinstance(code_or_exception, HTTPException): # old broken behavior + raise ValueError( + "Tried to register a handler for an exception instance {0!r}." + " Handlers can only be registered for exception classes or" + " HTTP error codes.".format(code_or_exception) + ) + + try: + exc_class, code = self._get_exc_class_and_code(code_or_exception) + except KeyError: + raise KeyError( + "'{0}' is not a recognized HTTP error code. Use a subclass of" + " HTTPException with that code instead.".format(code_or_exception) + ) + + handlers = self.error_handler_spec.setdefault(key, {}).setdefault(code, {}) + handlers[exc_class] = f + + @setupmethod + def template_filter(self, name=None): + """A decorator that is used to register custom template filter. + You can specify a name for the filter, otherwise the function + name will be used. Example:: + + @app.template_filter() + def reverse(s): + return s[::-1] + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def decorator(f): + self.add_template_filter(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_filter(self, f, name=None): + """Register a custom template filter. Works exactly like the + :meth:`template_filter` decorator. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + self.jinja_env.filters[name or f.__name__] = f + + @setupmethod + def template_test(self, name=None): + """A decorator that is used to register custom template test. + You can specify a name for the test, otherwise the function + name will be used. Example:: + + @app.template_test() + def is_prime(n): + if n == 2: + return True + for i in range(2, int(math.ceil(math.sqrt(n))) + 1): + if n % i == 0: + return False + return True + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def decorator(f): + self.add_template_test(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_test(self, f, name=None): + """Register a custom template test. Works exactly like the + :meth:`template_test` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + self.jinja_env.tests[name or f.__name__] = f + + @setupmethod + def template_global(self, name=None): + """A decorator that is used to register a custom template global function. + You can specify a name for the global function, otherwise the function + name will be used. Example:: + + @app.template_global() + def double(n): + return 2 * n + + .. 
versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. + """ + + def decorator(f): + self.add_template_global(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_global(self, f, name=None): + """Register a custom template global function. Works exactly like the + :meth:`template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. + """ + self.jinja_env.globals[name or f.__name__] = f + + @setupmethod + def before_request(self, f): + """Registers a function to run before each request. + + For example, this can be used to open a database connection, or to load + the logged in user from the session. + + The function will be called without any arguments. If it returns a + non-None value, the value is handled as if it was the return value from + the view, and further request handling is stopped. + """ + self.before_request_funcs.setdefault(None, []).append(f) + return f + + @setupmethod + def before_first_request(self, f): + """Registers a function to be run before the first request to this + instance of the application. + + The function will be called without any arguments and its return + value is ignored. + + .. versionadded:: 0.8 + """ + self.before_first_request_funcs.append(f) + return f + + @setupmethod + def after_request(self, f): + """Register a function to be run after each request. + + Your function must take one parameter, an instance of + :attr:`response_class` and return a new response object or the + same (see :meth:`process_response`). + + As of Flask 0.7 this function might not be executed at the end of the + request in case an unhandled exception occurred. + """ + self.after_request_funcs.setdefault(None, []).append(f) + return f + + @setupmethod + def teardown_request(self, f): + """Register a function to be run at the end of each request, + regardless of whether there was an exception or not. These functions + are executed when the request context is popped, even if not an + actual request was performed. + + Example:: + + ctx = app.test_request_context() + ctx.push() + ... + ctx.pop() + + When ``ctx.pop()`` is executed in the above example, the teardown + functions are called just before the request context moves from the + stack of active contexts. This becomes relevant if you are using + such constructs in tests. + + Generally teardown functions must take every necessary step to avoid + that they will fail. If they do execute code that might fail they + will have to surround the execution of these code by try/except + statements and log occurring errors. + + When a teardown function was called because of an exception it will + be passed an error object. + + The return values of teardown functions are ignored. + + .. admonition:: Debug Note + + In debug mode Flask will not tear down a request on an exception + immediately. Instead it will keep it alive so that the interactive + debugger can still access it. This behavior can be controlled + by the ``PRESERVE_CONTEXT_ON_EXCEPTION`` configuration variable. + """ + self.teardown_request_funcs.setdefault(None, []).append(f) + return f + + @setupmethod + def teardown_appcontext(self, f): + """Registers a function to be called when the application context + ends. These functions are typically also called when the request + context is popped. + + Example:: + + ctx = app.app_context() + ctx.push() + ... 
+ ctx.pop() + + When ``ctx.pop()`` is executed in the above example, the teardown + functions are called just before the app context moves from the + stack of active contexts. This becomes relevant if you are using + such constructs in tests. + + Since a request context typically also manages an application + context it would also be called when you pop a request context. + + When a teardown function was called because of an unhandled exception + it will be passed an error object. If an :meth:`errorhandler` is + registered, it will handle the exception and the teardown will not + receive it. + + The return values of teardown functions are ignored. + + .. versionadded:: 0.9 + """ + self.teardown_appcontext_funcs.append(f) + return f + + @setupmethod + def context_processor(self, f): + """Registers a template context processor function.""" + self.template_context_processors[None].append(f) + return f + + @setupmethod + def shell_context_processor(self, f): + """Registers a shell context processor function. + + .. versionadded:: 0.11 + """ + self.shell_context_processors.append(f) + return f + + @setupmethod + def url_value_preprocessor(self, f): + """Register a URL value preprocessor function for all view + functions in the application. These functions will be called before the + :meth:`before_request` functions. + + The function can modify the values captured from the matched url before + they are passed to the view. For example, this can be used to pop a + common language code value and place it in ``g`` rather than pass it to + every view. + + The function is passed the endpoint name and values dict. The return + value is ignored. + """ + self.url_value_preprocessors.setdefault(None, []).append(f) + return f + + @setupmethod + def url_defaults(self, f): + """Callback function for URL defaults for all view functions of the + application. It's called with the endpoint and values and should + update the values passed in place. + """ + self.url_default_functions.setdefault(None, []).append(f) + return f + + def _find_error_handler(self, e): + """Return a registered error handler for an exception in this order: + blueprint handler for a specific code, app handler for a specific code, + blueprint handler for an exception class, app handler for an exception + class, or ``None`` if a suitable handler is not found. + """ + exc_class, code = self._get_exc_class_and_code(type(e)) + + for name, c in ( + (request.blueprint, code), + (None, code), + (request.blueprint, None), + (None, None), + ): + handler_map = self.error_handler_spec.setdefault(name, {}).get(c) + + if not handler_map: + continue + + for cls in exc_class.__mro__: + handler = handler_map.get(cls) + + if handler is not None: + return handler + + def handle_http_exception(self, e): + """Handles an HTTP exception. By default this will invoke the + registered error handlers and fall back to returning the + exception as response. + + .. versionchanged:: 1.0.3 + ``RoutingException``, used internally for actions such as + slash redirects during routing, is not passed to error + handlers. + + .. versionchanged:: 1.0 + Exceptions are looked up by code *and* by MRO, so + ``HTTPExcpetion`` subclasses can be handled with a catch-all + handler for the base ``HTTPException``. + + .. versionadded:: 0.3 + """ + # Proxy exceptions don't have error codes. 
We want to always return + # those unchanged as errors + if e.code is None: + return e + + # RoutingExceptions are used internally to trigger routing + # actions, such as slash redirects raising RequestRedirect. They + # are not raised or handled in user code. + if isinstance(e, RoutingException): + return e + + handler = self._find_error_handler(e) + if handler is None: + return e + return handler(e) + + def trap_http_exception(self, e): + """Checks if an HTTP exception should be trapped or not. By default + this will return ``False`` for all exceptions except for a bad request + key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It + also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. + + This is called for all HTTP exceptions raised by a view function. + If it returns ``True`` for any exception the error handler for this + exception is not called and it shows up as regular exception in the + traceback. This is helpful for debugging implicitly raised HTTP + exceptions. + + .. versionchanged:: 1.0 + Bad request errors are not trapped by default in debug mode. + + .. versionadded:: 0.8 + """ + if self.config["TRAP_HTTP_EXCEPTIONS"]: + return True + + trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"] + + # if unset, trap key errors in debug mode + if ( + trap_bad_request is None + and self.debug + and isinstance(e, BadRequestKeyError) + ): + return True + + if trap_bad_request: + return isinstance(e, BadRequest) + + return False + + def handle_user_exception(self, e): + """This method is called whenever an exception occurs that + should be handled. A special case is :class:`~werkzeug + .exceptions.HTTPException` which is forwarded to the + :meth:`handle_http_exception` method. This function will either + return a response value or reraise the exception with the same + traceback. + + .. versionchanged:: 1.0 + Key errors raised from request data like ``form`` show the + bad key in debug mode rather than a generic bad request + message. + + .. versionadded:: 0.7 + """ + exc_type, exc_value, tb = sys.exc_info() + assert exc_value is e + # ensure not to trash sys.exc_info() at that point in case someone + # wants the traceback preserved in handle_http_exception. Of course + # we cannot prevent users from trashing it themselves in a custom + # trap_http_exception method so that's their fault then. + + if isinstance(e, BadRequestKeyError): + if self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"]: + e.show_exception = True + + # Werkzeug < 0.15 doesn't add the KeyError to the 400 + # message, add it in manually. + # TODO: clean up once Werkzeug >= 0.15.5 is required + if e.args[0] not in e.get_description(): + e.description = "KeyError: '{}'".format(*e.args) + elif not hasattr(BadRequestKeyError, "show_exception"): + e.args = () + + if isinstance(e, HTTPException) and not self.trap_http_exception(e): + return self.handle_http_exception(e) + + handler = self._find_error_handler(e) + + if handler is None: + reraise(exc_type, exc_value, tb) + return handler(e) + + def handle_exception(self, e): + """Handle an exception that did not have an error handler + associated with it, or that was raised from an error handler. + This always causes a 500 ``InternalServerError``. + + Always sends the :data:`got_request_exception` signal. + + If :attr:`propagate_exceptions` is ``True``, such as in debug + mode, the error will be re-raised so that the debugger can + display it. 
Otherwise, the original exception is logged, and + an :exc:`~werkzeug.exceptions.InternalServerError` is returned. + + If an error handler is registered for ``InternalServerError`` or + ``500``, it will be used. For consistency, the handler will + always receive the ``InternalServerError``. The original + unhandled exception is available as ``e.original_exception``. + + .. note:: + Prior to Werkzeug 1.0.0, ``InternalServerError`` will not + always have an ``original_exception`` attribute. Use + ``getattr(e, "original_exception", None)`` to simulate the + behavior for compatibility. + + .. versionchanged:: 1.1.0 + Always passes the ``InternalServerError`` instance to the + handler, setting ``original_exception`` to the unhandled + error. + + .. versionchanged:: 1.1.0 + ``after_request`` functions and other finalization is done + even for the default 500 response when there is no handler. + + .. versionadded:: 0.3 + """ + exc_type, exc_value, tb = sys.exc_info() + got_request_exception.send(self, exception=e) + + if self.propagate_exceptions: + # if we want to repropagate the exception, we can attempt to + # raise it with the whole traceback in case we can do that + # (the function was actually called from the except part) + # otherwise, we just raise the error again + if exc_value is e: + reraise(exc_type, exc_value, tb) + else: + raise e + + self.log_exception((exc_type, exc_value, tb)) + server_error = InternalServerError() + # TODO: pass as param when Werkzeug>=1.0.0 is required + # TODO: also remove note about this from docstring and docs + server_error.original_exception = e + handler = self._find_error_handler(server_error) + + if handler is not None: + server_error = handler(server_error) + + return self.finalize_request(server_error, from_error_handler=True) + + def log_exception(self, exc_info): + """Logs an exception. This is called by :meth:`handle_exception` + if debugging is disabled and right before the handler is called. + The default implementation logs the exception as error on the + :attr:`logger`. + + .. versionadded:: 0.8 + """ + self.logger.error( + "Exception on %s [%s]" % (request.path, request.method), exc_info=exc_info + ) + + def raise_routing_exception(self, request): + """Exceptions that are recording during routing are reraised with + this method. During debug we are not reraising redirect requests + for non ``GET``, ``HEAD``, or ``OPTIONS`` requests and we're raising + a different error instead to help debug situations. + + :internal: + """ + if ( + not self.debug + or not isinstance(request.routing_exception, RequestRedirect) + or request.method in ("GET", "HEAD", "OPTIONS") + ): + raise request.routing_exception + + from .debughelpers import FormDataRoutingRedirect + + raise FormDataRoutingRedirect(request) + + def dispatch_request(self): + """Does the request dispatching. Matches the URL and returns the + return value of the view or error handler. This does not have to + be a response object. In order to convert the return value to a + proper response object, call :func:`make_response`. + + .. versionchanged:: 0.7 + This no longer does the exception handling, this code was + moved to the new :meth:`full_dispatch_request`. 
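        The value returned by the view here is later passed through
        :meth:`make_response`, so a view may return, for example, a string,
        a dict, a tuple, or a full response object (a purely illustrative
        sketch)::

            @app.route("/ping")
            def ping():
                return {"ok": True}  # converted to a JSON response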
+ """ + req = _request_ctx_stack.top.request + if req.routing_exception is not None: + self.raise_routing_exception(req) + rule = req.url_rule + # if we provide automatic options for this URL and the + # request came with the OPTIONS method, reply automatically + if ( + getattr(rule, "provide_automatic_options", False) + and req.method == "OPTIONS" + ): + return self.make_default_options_response() + # otherwise dispatch to the handler for that endpoint + return self.view_functions[rule.endpoint](**req.view_args) + + def full_dispatch_request(self): + """Dispatches the request and on top of that performs request + pre and postprocessing as well as HTTP exception catching and + error handling. + + .. versionadded:: 0.7 + """ + self.try_trigger_before_first_request_functions() + try: + request_started.send(self) + rv = self.preprocess_request() + if rv is None: + rv = self.dispatch_request() + except Exception as e: + rv = self.handle_user_exception(e) + return self.finalize_request(rv) + + def finalize_request(self, rv, from_error_handler=False): + """Given the return value from a view function this finalizes + the request by converting it into a response and invoking the + postprocessing functions. This is invoked for both normal + request dispatching as well as error handlers. + + Because this means that it might be called as a result of a + failure a special safe mode is available which can be enabled + with the `from_error_handler` flag. If enabled, failures in + response processing will be logged and otherwise ignored. + + :internal: + """ + response = self.make_response(rv) + try: + response = self.process_response(response) + request_finished.send(self, response=response) + except Exception: + if not from_error_handler: + raise + self.logger.exception( + "Request finalizing failed with an error while handling an error" + ) + return response + + def try_trigger_before_first_request_functions(self): + """Called before each request and will ensure that it triggers + the :attr:`before_first_request_funcs` and only exactly once per + application instance (which means process usually). + + :internal: + """ + if self._got_first_request: + return + with self._before_request_lock: + if self._got_first_request: + return + for func in self.before_first_request_funcs: + func() + self._got_first_request = True + + def make_default_options_response(self): + """This method is called to create the default ``OPTIONS`` response. + This can be changed through subclassing to change the default + behavior of ``OPTIONS`` responses. + + .. versionadded:: 0.7 + """ + adapter = _request_ctx_stack.top.url_adapter + if hasattr(adapter, "allowed_methods"): + methods = adapter.allowed_methods() + else: + # fallback for Werkzeug < 0.7 + methods = [] + try: + adapter.match(method="--") + except MethodNotAllowed as e: + methods = e.valid_methods + except HTTPException: + pass + rv = self.response_class() + rv.allow.update(methods) + return rv + + def should_ignore_error(self, error): + """This is called to figure out if an error should be ignored + or not as far as the teardown system is concerned. If this + function returns ``True`` then the teardown handlers will not be + passed the error. + + .. versionadded:: 0.10 + """ + return False + + def make_response(self, rv): + """Convert the return value from a view function to an instance of + :attr:`response_class`. + + :param rv: the return value from the view function. The view function + must return a response. 
Returning ``None``, or the view ending + without returning, is not allowed. The following types are allowed + for ``view_rv``: + + ``str`` (``unicode`` in Python 2) + A response object is created with the string encoded to UTF-8 + as the body. + + ``bytes`` (``str`` in Python 2) + A response object is created with the bytes as the body. + + ``dict`` + A dictionary that will be jsonify'd before being returned. + + ``tuple`` + Either ``(body, status, headers)``, ``(body, status)``, or + ``(body, headers)``, where ``body`` is any of the other types + allowed here, ``status`` is a string or an integer, and + ``headers`` is a dictionary or a list of ``(key, value)`` + tuples. If ``body`` is a :attr:`response_class` instance, + ``status`` overwrites the exiting value and ``headers`` are + extended. + + :attr:`response_class` + The object is returned unchanged. + + other :class:`~werkzeug.wrappers.Response` class + The object is coerced to :attr:`response_class`. + + :func:`callable` + The function is called as a WSGI application. The result is + used to create a response object. + + .. versionchanged:: 0.9 + Previously a tuple was interpreted as the arguments for the + response object. + """ + + status = headers = None + + # unpack tuple returns + if isinstance(rv, tuple): + len_rv = len(rv) + + # a 3-tuple is unpacked directly + if len_rv == 3: + rv, status, headers = rv + # decide if a 2-tuple has status or headers + elif len_rv == 2: + if isinstance(rv[1], (Headers, dict, tuple, list)): + rv, headers = rv + else: + rv, status = rv + # other sized tuples are not allowed + else: + raise TypeError( + "The view function did not return a valid response tuple." + " The tuple must have the form (body, status, headers)," + " (body, status), or (body, headers)." + ) + + # the body must not be None + if rv is None: + raise TypeError( + "The view function did not return a valid response. The" + " function either returned None or ended without a return" + " statement." + ) + + # make sure the body is an instance of the response class + if not isinstance(rv, self.response_class): + if isinstance(rv, (text_type, bytes, bytearray)): + # let the response class set the status and headers instead of + # waiting to do it manually, so that the class can handle any + # special logic + rv = self.response_class(rv, status=status, headers=headers) + status = headers = None + elif isinstance(rv, dict): + rv = jsonify(rv) + elif isinstance(rv, BaseResponse) or callable(rv): + # evaluate a WSGI callable, or coerce a different response + # class to the correct type + try: + rv = self.response_class.force_type(rv, request.environ) + except TypeError as e: + new_error = TypeError( + "{e}\nThe view function did not return a valid" + " response. The return type must be a string, dict, tuple," + " Response instance, or WSGI callable, but it was a" + " {rv.__class__.__name__}.".format(e=e, rv=rv) + ) + reraise(TypeError, new_error, sys.exc_info()[2]) + else: + raise TypeError( + "The view function did not return a valid" + " response. 
The return type must be a string, dict, tuple," + " Response instance, or WSGI callable, but it was a" + " {rv.__class__.__name__}.".format(rv=rv) + ) + + # prefer the status if it was provided + if status is not None: + if isinstance(status, (text_type, bytes, bytearray)): + rv.status = status + else: + rv.status_code = status + + # extend existing headers with provided headers + if headers: + rv.headers.extend(headers) + + return rv + + def create_url_adapter(self, request): + """Creates a URL adapter for the given request. The URL adapter + is created at a point where the request context is not yet set + up so the request is passed explicitly. + + .. versionadded:: 0.6 + + .. versionchanged:: 0.9 + This can now also be called without a request object when the + URL adapter is created for the application context. + + .. versionchanged:: 1.0 + :data:`SERVER_NAME` no longer implicitly enables subdomain + matching. Use :attr:`subdomain_matching` instead. + """ + if request is not None: + # If subdomain matching is disabled (the default), use the + # default subdomain in all cases. This should be the default + # in Werkzeug but it currently does not have that feature. + subdomain = ( + (self.url_map.default_subdomain or None) + if not self.subdomain_matching + else None + ) + return self.url_map.bind_to_environ( + request.environ, + server_name=self.config["SERVER_NAME"], + subdomain=subdomain, + ) + # We need at the very least the server name to be set for this + # to work. + if self.config["SERVER_NAME"] is not None: + return self.url_map.bind( + self.config["SERVER_NAME"], + script_name=self.config["APPLICATION_ROOT"], + url_scheme=self.config["PREFERRED_URL_SCHEME"], + ) + + def inject_url_defaults(self, endpoint, values): + """Injects the URL defaults for the given endpoint directly into + the values dictionary passed. This is used internally and + automatically called on URL building. + + .. versionadded:: 0.7 + """ + funcs = self.url_default_functions.get(None, ()) + if "." in endpoint: + bp = endpoint.rsplit(".", 1)[0] + funcs = chain(funcs, self.url_default_functions.get(bp, ())) + for func in funcs: + func(endpoint, values) + + def handle_url_build_error(self, error, endpoint, values): + """Handle :class:`~werkzeug.routing.BuildError` on :meth:`url_for`. + """ + exc_type, exc_value, tb = sys.exc_info() + for handler in self.url_build_error_handlers: + try: + rv = handler(error, endpoint, values) + if rv is not None: + return rv + except BuildError as e: + # make error available outside except block (py3) + error = e + + # At this point we want to reraise the exception. If the error is + # still the same one we can reraise it with the original traceback, + # otherwise we raise it from here. + if error is exc_value: + reraise(exc_type, exc_value, tb) + raise error + + def preprocess_request(self): + """Called before the request is dispatched. Calls + :attr:`url_value_preprocessors` registered with the app and the + current blueprint (if any). Then calls :attr:`before_request_funcs` + registered with the app and the blueprint. + + If any :meth:`before_request` handler returns a non-None value, the + value is handled as if it was the return value from the view, and + further request handling is stopped. 
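        A minimal sketch of such a short-circuiting hook (the ``session`` key
        and the ``login`` endpoint are illustrative only)::

            @app.before_request
            def require_login():
                if "user_id" not in session and request.endpoint != "login":
                    return redirect(url_for("login"))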
+ """ + + bp = _request_ctx_stack.top.request.blueprint + + funcs = self.url_value_preprocessors.get(None, ()) + if bp is not None and bp in self.url_value_preprocessors: + funcs = chain(funcs, self.url_value_preprocessors[bp]) + for func in funcs: + func(request.endpoint, request.view_args) + + funcs = self.before_request_funcs.get(None, ()) + if bp is not None and bp in self.before_request_funcs: + funcs = chain(funcs, self.before_request_funcs[bp]) + for func in funcs: + rv = func() + if rv is not None: + return rv + + def process_response(self, response): + """Can be overridden in order to modify the response object + before it's sent to the WSGI server. By default this will + call all the :meth:`after_request` decorated functions. + + .. versionchanged:: 0.5 + As of Flask 0.5 the functions registered for after request + execution are called in reverse order of registration. + + :param response: a :attr:`response_class` object. + :return: a new response object or the same, has to be an + instance of :attr:`response_class`. + """ + ctx = _request_ctx_stack.top + bp = ctx.request.blueprint + funcs = ctx._after_request_functions + if bp is not None and bp in self.after_request_funcs: + funcs = chain(funcs, reversed(self.after_request_funcs[bp])) + if None in self.after_request_funcs: + funcs = chain(funcs, reversed(self.after_request_funcs[None])) + for handler in funcs: + response = handler(response) + if not self.session_interface.is_null_session(ctx.session): + self.session_interface.save_session(self, ctx.session, response) + return response + + def do_teardown_request(self, exc=_sentinel): + """Called after the request is dispatched and the response is + returned, right before the request context is popped. + + This calls all functions decorated with + :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` + if a blueprint handled the request. Finally, the + :data:`request_tearing_down` signal is sent. + + This is called by + :meth:`RequestContext.pop() `, + which may be delayed during testing to maintain access to + resources. + + :param exc: An unhandled exception raised while dispatching the + request. Detected from the current exception information if + not passed. Passed to each teardown function. + + .. versionchanged:: 0.9 + Added the ``exc`` argument. + """ + if exc is _sentinel: + exc = sys.exc_info()[1] + funcs = reversed(self.teardown_request_funcs.get(None, ())) + bp = _request_ctx_stack.top.request.blueprint + if bp is not None and bp in self.teardown_request_funcs: + funcs = chain(funcs, reversed(self.teardown_request_funcs[bp])) + for func in funcs: + func(exc) + request_tearing_down.send(self, exc=exc) + + def do_teardown_appcontext(self, exc=_sentinel): + """Called right before the application context is popped. + + When handling a request, the application context is popped + after the request context. See :meth:`do_teardown_request`. + + This calls all functions decorated with + :meth:`teardown_appcontext`. Then the + :data:`appcontext_tearing_down` signal is sent. + + This is called by + :meth:`AppContext.pop() `. + + .. versionadded:: 0.9 + """ + if exc is _sentinel: + exc = sys.exc_info()[1] + for func in reversed(self.teardown_appcontext_funcs): + func(exc) + appcontext_tearing_down.send(self, exc=exc) + + def app_context(self): + """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` + block to push the context, which will make :data:`current_app` + point at this application. 
+ + An application context is automatically pushed by + :meth:`RequestContext.push() ` + when handling a request, and when running a CLI command. Use + this to manually create a context outside of these situations. + + :: + + with app.app_context(): + init_db() + + See :doc:`/appcontext`. + + .. versionadded:: 0.9 + """ + return AppContext(self) + + def request_context(self, environ): + """Create a :class:`~flask.ctx.RequestContext` representing a + WSGI environment. Use a ``with`` block to push the context, + which will make :data:`request` point at this request. + + See :doc:`/reqcontext`. + + Typically you should not call this from your own code. A request + context is automatically pushed by the :meth:`wsgi_app` when + handling a request. Use :meth:`test_request_context` to create + an environment and context instead of this method. + + :param environ: a WSGI environment + """ + return RequestContext(self, environ) + + def test_request_context(self, *args, **kwargs): + """Create a :class:`~flask.ctx.RequestContext` for a WSGI + environment created from the given values. This is mostly useful + during testing, where you may want to run a function that uses + request data without dispatching a full request. + + See :doc:`/reqcontext`. + + Use a ``with`` block to push the context, which will make + :data:`request` point at the request for the created + environment. :: + + with test_request_context(...): + generate_report() + + When using the shell, it may be easier to push and pop the + context manually to avoid indentation. :: + + ctx = app.test_request_context(...) + ctx.push() + ... + ctx.pop() + + Takes the same arguments as Werkzeug's + :class:`~werkzeug.test.EnvironBuilder`, with some defaults from + the application. See the linked Werkzeug docs for most of the + available arguments. Flask-specific behavior is listed here. + + :param path: URL path being requested. + :param base_url: Base URL where the app is being served, which + ``path`` is relative to. If not given, built from + :data:`PREFERRED_URL_SCHEME`, ``subdomain``, + :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. + :param subdomain: Subdomain name to append to + :data:`SERVER_NAME`. + :param url_scheme: Scheme to use instead of + :data:`PREFERRED_URL_SCHEME`. + :param data: The request body, either as a string or a dict of + form keys and values. + :param json: If given, this is serialized as JSON and passed as + ``data``. Also defaults ``content_type`` to + ``application/json``. + :param args: other positional arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + :param kwargs: other keyword arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + """ + from .testing import EnvironBuilder + + builder = EnvironBuilder(self, *args, **kwargs) + + try: + return self.request_context(builder.get_environ()) + finally: + builder.close() + + def wsgi_app(self, environ, start_response): + """The actual WSGI application. This is not implemented in + :meth:`__call__` so that middlewares can be applied without + losing a reference to the app object. Instead of doing this:: + + app = MyMiddleware(app) + + It's a better idea to do this instead:: + + app.wsgi_app = MyMiddleware(app.wsgi_app) + + Then you still have the original application object around and + can continue to call methods on it. + + .. versionchanged:: 0.7 + Teardown events for the request and app contexts are called + even if an unhandled error occurs. Other events may not be + called depending on when an error occurs during dispatch. 
+ See :ref:`callbacks-and-errors`. + + :param environ: A WSGI environment. + :param start_response: A callable accepting a status code, + a list of headers, and an optional exception context to + start the response. + """ + ctx = self.request_context(environ) + error = None + try: + try: + ctx.push() + response = self.full_dispatch_request() + except Exception as e: + error = e + response = self.handle_exception(e) + except: # noqa: B001 + error = sys.exc_info()[1] + raise + return response(environ, start_response) + finally: + if self.should_ignore_error(error): + error = None + ctx.auto_pop(error) + + def __call__(self, environ, start_response): + """The WSGI server calls the Flask application object as the + WSGI application. This calls :meth:`wsgi_app` which can be + wrapped to applying middleware.""" + return self.wsgi_app(environ, start_response) + + def __repr__(self): + return "<%s %r>" % (self.__class__.__name__, self.name) diff --git a/py3/lib/python3.6/site-packages/flask/blueprints.py b/py3/lib/python3.6/site-packages/flask/blueprints.py new file mode 100644 index 0000000..8978104 --- /dev/null +++ b/py3/lib/python3.6/site-packages/flask/blueprints.py @@ -0,0 +1,569 @@ +# -*- coding: utf-8 -*- +""" + flask.blueprints + ~~~~~~~~~~~~~~~~ + + Blueprints are the recommended way to implement larger or more + pluggable applications in Flask 0.7 and later. + + :copyright: 2010 Pallets + :license: BSD-3-Clause +""" +from functools import update_wrapper + +from .helpers import _endpoint_from_view_func +from .helpers import _PackageBoundObject + +# a singleton sentinel value for parameter defaults +_sentinel = object() + + +class BlueprintSetupState(object): + """Temporary holder object for registering a blueprint with the + application. An instance of this class is created by the + :meth:`~flask.Blueprint.make_setup_state` method and later passed + to all register callback functions. + """ + + def __init__(self, blueprint, app, options, first_registration): + #: a reference to the current application + self.app = app + + #: a reference to the blueprint that created this setup state. + self.blueprint = blueprint + + #: a dictionary with all options that were passed to the + #: :meth:`~flask.Flask.register_blueprint` method. + self.options = options + + #: as blueprints can be registered multiple times with the + #: application and not everything wants to be registered + #: multiple times on it, this attribute can be used to figure + #: out if the blueprint was registered in the past already. + self.first_registration = first_registration + + subdomain = self.options.get("subdomain") + if subdomain is None: + subdomain = self.blueprint.subdomain + + #: The subdomain that the blueprint should be active for, ``None`` + #: otherwise. + self.subdomain = subdomain + + url_prefix = self.options.get("url_prefix") + if url_prefix is None: + url_prefix = self.blueprint.url_prefix + #: The prefix that should be used for all URLs defined on the + #: blueprint. + self.url_prefix = url_prefix + + #: A dictionary with URL defaults that is added to each and every + #: URL that was defined with the blueprint. + self.url_defaults = dict(self.blueprint.url_values_defaults) + self.url_defaults.update(self.options.get("url_defaults", ())) + + def add_url_rule(self, rule, endpoint=None, view_func=None, **options): + """A helper method to register a rule (and optionally a view function) + to the application. The endpoint is automatically prefixed with the + blueprint's name. 
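        For example (names are illustrative only), a blueprint named ``shop``
        registered with ``url_prefix="/api"`` turns a deferred
        ``add_url_rule("/items", "items", list_items)`` call into an
        application rule for ``/api/items`` under the endpoint ``shop.items``.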
+ """ + if self.url_prefix is not None: + if rule: + rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/"))) + else: + rule = self.url_prefix + options.setdefault("subdomain", self.subdomain) + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) + defaults = self.url_defaults + if "defaults" in options: + defaults = dict(defaults, **options.pop("defaults")) + self.app.add_url_rule( + rule, + "%s.%s" % (self.blueprint.name, endpoint), + view_func, + defaults=defaults, + **options + ) + + +class Blueprint(_PackageBoundObject): + """Represents a blueprint, a collection of routes and other + app-related functions that can be registered on a real application + later. + + A blueprint is an object that allows defining application functions + without requiring an application object ahead of time. It uses the + same decorators as :class:`~flask.Flask`, but defers the need for an + application by recording them for later registration. + + Decorating a function with a blueprint creates a deferred function + that is called with :class:`~flask.blueprints.BlueprintSetupState` + when the blueprint is registered on an application. + + See :ref:`blueprints` for more information. + + .. versionchanged:: 1.1.0 + Blueprints have a ``cli`` group to register nested CLI commands. + The ``cli_group`` parameter controls the name of the group under + the ``flask`` command. + + .. versionadded:: 0.7 + + :param name: The name of the blueprint. Will be prepended to each + endpoint name. + :param import_name: The name of the blueprint package, usually + ``__name__``. This helps locate the ``root_path`` for the + blueprint. + :param static_folder: A folder with static files that should be + served by the blueprint's static route. The path is relative to + the blueprint's root path. Blueprint static files are disabled + by default. + :param static_url_path: The url to serve static files from. + Defaults to ``static_folder``. If the blueprint does not have + a ``url_prefix``, the app's static route will take precedence, + and the blueprint's static files won't be accessible. + :param template_folder: A folder with templates that should be added + to the app's template search path. The path is relative to the + blueprint's root path. Blueprint templates are disabled by + default. Blueprint templates have a lower precedence than those + in the app's templates folder. + :param url_prefix: A path to prepend to all of the blueprint's URLs, + to make them distinct from the rest of the app's routes. + :param subdomain: A subdomain that blueprint routes will match on by + default. + :param url_defaults: A dict of default values that blueprint routes + will receive by default. + :param root_path: By default, the blueprint will automatically this + based on ``import_name``. In certain situations this automatic + detection can fail, so the path can be specified manually + instead. + """ + + warn_on_modifications = False + _got_registered_once = False + + #: Blueprint local JSON decoder class to use. + #: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_encoder`. + json_encoder = None + #: Blueprint local JSON decoder class to use. + #: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_decoder`. + json_decoder = None + + # TODO remove the next three attrs when Sphinx :inherited-members: works + # https://github.com/sphinx-doc/sphinx/issues/741 + + #: The name of the package or module that this app belongs to. Do not + #: change this once it is set by the constructor. 
+ import_name = None + + #: Location of the template files to be added to the template lookup. + #: ``None`` if templates should not be added. + template_folder = None + + #: Absolute path to the package on the filesystem. Used to look up + #: resources contained in the package. + root_path = None + + def __init__( + self, + name, + import_name, + static_folder=None, + static_url_path=None, + template_folder=None, + url_prefix=None, + subdomain=None, + url_defaults=None, + root_path=None, + cli_group=_sentinel, + ): + _PackageBoundObject.__init__( + self, import_name, template_folder, root_path=root_path + ) + self.name = name + self.url_prefix = url_prefix + self.subdomain = subdomain + self.static_folder = static_folder + self.static_url_path = static_url_path + self.deferred_functions = [] + if url_defaults is None: + url_defaults = {} + self.url_values_defaults = url_defaults + self.cli_group = cli_group + + def record(self, func): + """Registers a function that is called when the blueprint is + registered on the application. This function is called with the + state as argument as returned by the :meth:`make_setup_state` + method. + """ + if self._got_registered_once and self.warn_on_modifications: + from warnings import warn + + warn( + Warning( + "The blueprint was already registered once " + "but is getting modified now. These changes " + "will not show up." + ) + ) + self.deferred_functions.append(func) + + def record_once(self, func): + """Works like :meth:`record` but wraps the function in another + function that will ensure the function is only called once. If the + blueprint is registered a second time on the application, the + function passed is not called. + """ + + def wrapper(state): + if state.first_registration: + func(state) + + return self.record(update_wrapper(wrapper, func)) + + def make_setup_state(self, app, options, first_registration=False): + """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` + object that is later passed to the register callback functions. + Subclasses can override this to return a subclass of the setup state. + """ + return BlueprintSetupState(self, app, options, first_registration) + + def register(self, app, options, first_registration=False): + """Called by :meth:`Flask.register_blueprint` to register all views + and callbacks registered on the blueprint with the application. Creates + a :class:`.BlueprintSetupState` and calls each :meth:`record` callback + with it. + + :param app: The application this blueprint is being registered with. + :param options: Keyword arguments forwarded from + :meth:`~Flask.register_blueprint`. + :param first_registration: Whether this is the first time this + blueprint has been registered on the application. + """ + self._got_registered_once = True + state = self.make_setup_state(app, options, first_registration) + + if self.has_static_folder: + state.add_url_rule( + self.static_url_path + "/", + view_func=self.send_static_file, + endpoint="static", + ) + + for deferred in self.deferred_functions: + deferred(state) + + cli_resolved_group = options.get("cli_group", self.cli_group) + + if not self.cli.commands: + return + + if cli_resolved_group is None: + app.cli.commands.update(self.cli.commands) + elif cli_resolved_group is _sentinel: + self.cli.name = self.name + app.cli.add_command(self.cli) + else: + self.cli.name = cli_resolved_group + app.cli.add_command(self.cli) + + def route(self, rule, **options): + """Like :meth:`Flask.route` but for a blueprint. 
The endpoint for the + :func:`url_for` function is prefixed with the name of the blueprint. + """ + + def decorator(f): + endpoint = options.pop("endpoint", f.__name__) + self.add_url_rule(rule, endpoint, f, **options) + return f + + return decorator + + def add_url_rule(self, rule, endpoint=None, view_func=None, **options): + """Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for + the :func:`url_for` function is prefixed with the name of the blueprint. + """ + if endpoint: + assert "." not in endpoint, "Blueprint endpoints should not contain dots" + if view_func and hasattr(view_func, "__name__"): + assert ( + "." not in view_func.__name__ + ), "Blueprint view function name should not contain dots" + self.record(lambda s: s.add_url_rule(rule, endpoint, view_func, **options)) + + def endpoint(self, endpoint): + """Like :meth:`Flask.endpoint` but for a blueprint. This does not + prefix the endpoint with the blueprint name, this has to be done + explicitly by the user of this method. If the endpoint is prefixed + with a `.` it will be registered to the current blueprint, otherwise + it's an application independent endpoint. + """ + + def decorator(f): + def register_endpoint(state): + state.app.view_functions[endpoint] = f + + self.record_once(register_endpoint) + return f + + return decorator + + def app_template_filter(self, name=None): + """Register a custom template filter, available application wide. Like + :meth:`Flask.template_filter` but for a blueprint. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def decorator(f): + self.add_app_template_filter(f, name=name) + return f + + return decorator + + def add_app_template_filter(self, f, name=None): + """Register a custom template filter, available application wide. Like + :meth:`Flask.add_template_filter` but for a blueprint. Works exactly + like the :meth:`app_template_filter` decorator. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def register_template(state): + state.app.jinja_env.filters[name or f.__name__] = f + + self.record_once(register_template) + + def app_template_test(self, name=None): + """Register a custom template test, available application wide. Like + :meth:`Flask.template_test` but for a blueprint. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def decorator(f): + self.add_app_template_test(f, name=name) + return f + + return decorator + + def add_app_template_test(self, f, name=None): + """Register a custom template test, available application wide. Like + :meth:`Flask.add_template_test` but for a blueprint. Works exactly + like the :meth:`app_template_test` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def register_template(state): + state.app.jinja_env.tests[name or f.__name__] = f + + self.record_once(register_template) + + def app_template_global(self, name=None): + """Register a custom template global, available application wide. Like + :meth:`Flask.template_global` but for a blueprint. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. 
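        A short usage sketch (``bp`` stands for any existing Blueprint)::

            @bp.app_template_global()
            def site_name():
                return "Example Site"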
+ """ + + def decorator(f): + self.add_app_template_global(f, name=name) + return f + + return decorator + + def add_app_template_global(self, f, name=None): + """Register a custom template global, available application wide. Like + :meth:`Flask.add_template_global` but for a blueprint. Works exactly + like the :meth:`app_template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + + def register_template(state): + state.app.jinja_env.globals[name or f.__name__] = f + + self.record_once(register_template) + + def before_request(self, f): + """Like :meth:`Flask.before_request` but for a blueprint. This function + is only executed before each request that is handled by a function of + that blueprint. + """ + self.record_once( + lambda s: s.app.before_request_funcs.setdefault(self.name, []).append(f) + ) + return f + + def before_app_request(self, f): + """Like :meth:`Flask.before_request`. Such a function is executed + before each request, even if outside of a blueprint. + """ + self.record_once( + lambda s: s.app.before_request_funcs.setdefault(None, []).append(f) + ) + return f + + def before_app_first_request(self, f): + """Like :meth:`Flask.before_first_request`. Such a function is + executed before the first request to the application. + """ + self.record_once(lambda s: s.app.before_first_request_funcs.append(f)) + return f + + def after_request(self, f): + """Like :meth:`Flask.after_request` but for a blueprint. This function + is only executed after each request that is handled by a function of + that blueprint. + """ + self.record_once( + lambda s: s.app.after_request_funcs.setdefault(self.name, []).append(f) + ) + return f + + def after_app_request(self, f): + """Like :meth:`Flask.after_request` but for a blueprint. Such a function + is executed after each request, even if outside of the blueprint. + """ + self.record_once( + lambda s: s.app.after_request_funcs.setdefault(None, []).append(f) + ) + return f + + def teardown_request(self, f): + """Like :meth:`Flask.teardown_request` but for a blueprint. This + function is only executed when tearing down requests handled by a + function of that blueprint. Teardown request functions are executed + when the request context is popped, even when no actual request was + performed. + """ + self.record_once( + lambda s: s.app.teardown_request_funcs.setdefault(self.name, []).append(f) + ) + return f + + def teardown_app_request(self, f): + """Like :meth:`Flask.teardown_request` but for a blueprint. Such a + function is executed when tearing down each request, even if outside of + the blueprint. + """ + self.record_once( + lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f) + ) + return f + + def context_processor(self, f): + """Like :meth:`Flask.context_processor` but for a blueprint. This + function is only executed for requests handled by a blueprint. + """ + self.record_once( + lambda s: s.app.template_context_processors.setdefault( + self.name, [] + ).append(f) + ) + return f + + def app_context_processor(self, f): + """Like :meth:`Flask.context_processor` but for a blueprint. Such a + function is executed each request, even if outside of the blueprint. + """ + self.record_once( + lambda s: s.app.template_context_processors.setdefault(None, []).append(f) + ) + return f + + def app_errorhandler(self, code): + """Like :meth:`Flask.errorhandler` but for a blueprint. 
This + handler is used for all requests, even if outside of the blueprint. + """ + + def decorator(f): + self.record_once(lambda s: s.app.errorhandler(code)(f)) + return f + + return decorator + + def url_value_preprocessor(self, f): + """Registers a function as URL value preprocessor for this + blueprint. It's called before the view functions are called and + can modify the url values provided. + """ + self.record_once( + lambda s: s.app.url_value_preprocessors.setdefault(self.name, []).append(f) + ) + return f + + def url_defaults(self, f): + """Callback function for URL defaults for this blueprint. It's called + with the endpoint and values and should update the values passed + in place. + """ + self.record_once( + lambda s: s.app.url_default_functions.setdefault(self.name, []).append(f) + ) + return f + + def app_url_value_preprocessor(self, f): + """Same as :meth:`url_value_preprocessor` but application wide. + """ + self.record_once( + lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f) + ) + return f + + def app_url_defaults(self, f): + """Same as :meth:`url_defaults` but application wide. + """ + self.record_once( + lambda s: s.app.url_default_functions.setdefault(None, []).append(f) + ) + return f + + def errorhandler(self, code_or_exception): + """Registers an error handler that becomes active for this blueprint + only. Please be aware that routing does not happen local to a + blueprint so an error handler for 404 usually is not handled by + a blueprint unless it is caused inside a view function. Another + special case is the 500 internal server error which is always looked + up from the application. + + Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator + of the :class:`~flask.Flask` object. + """ + + def decorator(f): + self.record_once( + lambda s: s.app._register_error_handler(self.name, code_or_exception, f) + ) + return f + + return decorator + + def register_error_handler(self, code_or_exception, f): + """Non-decorator version of the :meth:`errorhandler` error attach + function, akin to the :meth:`~flask.Flask.register_error_handler` + application-wide function of the :class:`~flask.Flask` object but + for error handlers limited to this blueprint. + + .. versionadded:: 0.11 + """ + self.record_once( + lambda s: s.app._register_error_handler(self.name, code_or_exception, f) + ) diff --git a/py3/lib/python3.6/site-packages/flask/cli.py b/py3/lib/python3.6/site-packages/flask/cli.py new file mode 100644 index 0000000..1158545 --- /dev/null +++ b/py3/lib/python3.6/site-packages/flask/cli.py @@ -0,0 +1,970 @@ +# -*- coding: utf-8 -*- +""" + flask.cli + ~~~~~~~~~ + + A simple command line application to run flask apps. 
+ + :copyright: 2010 Pallets + :license: BSD-3-Clause +""" +from __future__ import print_function + +import ast +import inspect +import os +import platform +import re +import sys +import traceback +from functools import update_wrapper +from operator import attrgetter +from threading import Lock +from threading import Thread + +import click +from werkzeug.utils import import_string + +from ._compat import getargspec +from ._compat import itervalues +from ._compat import reraise +from ._compat import text_type +from .globals import current_app +from .helpers import get_debug_flag +from .helpers import get_env +from .helpers import get_load_dotenv + +try: + import dotenv +except ImportError: + dotenv = None + +try: + import ssl +except ImportError: + ssl = None + + +class NoAppException(click.UsageError): + """Raised if an application cannot be found or loaded.""" + + +def find_best_app(script_info, module): + """Given a module instance this tries to find the best possible + application in the module or raises an exception. + """ + from . import Flask + + # Search for the most common names first. + for attr_name in ("app", "application"): + app = getattr(module, attr_name, None) + + if isinstance(app, Flask): + return app + + # Otherwise find the only object that is a Flask instance. + matches = [v for v in itervalues(module.__dict__) if isinstance(v, Flask)] + + if len(matches) == 1: + return matches[0] + elif len(matches) > 1: + raise NoAppException( + 'Detected multiple Flask applications in module "{module}". Use ' + '"FLASK_APP={module}:name" to specify the correct ' + "one.".format(module=module.__name__) + ) + + # Search for app factory functions. + for attr_name in ("create_app", "make_app"): + app_factory = getattr(module, attr_name, None) + + if inspect.isfunction(app_factory): + try: + app = call_factory(script_info, app_factory) + + if isinstance(app, Flask): + return app + except TypeError: + if not _called_with_wrong_args(app_factory): + raise + raise NoAppException( + 'Detected factory "{factory}" in module "{module}", but ' + "could not call it without arguments. Use " + "\"FLASK_APP='{module}:{factory}(args)'\" to specify " + "arguments.".format(factory=attr_name, module=module.__name__) + ) + + raise NoAppException( + 'Failed to find Flask application or factory in module "{module}". ' + 'Use "FLASK_APP={module}:name to specify one.'.format(module=module.__name__) + ) + + +def call_factory(script_info, app_factory, arguments=()): + """Takes an app factory, a ``script_info` object and optionally a tuple + of arguments. Checks for the existence of a script_info argument and calls + the app_factory depending on that and the arguments provided. + """ + args_spec = getargspec(app_factory) + arg_names = args_spec.args + arg_defaults = args_spec.defaults + + if "script_info" in arg_names: + return app_factory(*arguments, script_info=script_info) + elif arguments: + return app_factory(*arguments) + elif not arguments and len(arg_names) == 1 and arg_defaults is None: + return app_factory(script_info) + + return app_factory() + + +def _called_with_wrong_args(factory): + """Check whether calling a function raised a ``TypeError`` because + the call failed or because something in the factory raised the + error. 
+ + :param factory: the factory function that was called + :return: true if the call failed + """ + tb = sys.exc_info()[2] + + try: + while tb is not None: + if tb.tb_frame.f_code is factory.__code__: + # in the factory, it was called successfully + return False + + tb = tb.tb_next + + # didn't reach the factory + return True + finally: + # explicitly delete tb as it is circular referenced + # https://docs.python.org/2/library/sys.html#sys.exc_info + del tb + + +def find_app_by_string(script_info, module, app_name): + """Checks if the given string is a variable name or a function. If it is a + function, it checks for specified arguments and whether it takes a + ``script_info`` argument and calls the function with the appropriate + arguments. + """ + from . import Flask + + match = re.match(r"^ *([^ ()]+) *(?:\((.*?) *,? *\))? *$", app_name) + + if not match: + raise NoAppException( + '"{name}" is not a valid variable name or function ' + "expression.".format(name=app_name) + ) + + name, args = match.groups() + + try: + attr = getattr(module, name) + except AttributeError as e: + raise NoAppException(e.args[0]) + + if inspect.isfunction(attr): + if args: + try: + args = ast.literal_eval("({args},)".format(args=args)) + except (ValueError, SyntaxError) as e: + raise NoAppException( + "Could not parse the arguments in " + '"{app_name}".'.format(e=e, app_name=app_name) + ) + else: + args = () + + try: + app = call_factory(script_info, attr, args) + except TypeError as e: + if not _called_with_wrong_args(attr): + raise + + raise NoAppException( + '{e}\nThe factory "{app_name}" in module "{module}" could not ' + "be called with the specified arguments.".format( + e=e, app_name=app_name, module=module.__name__ + ) + ) + else: + app = attr + + if isinstance(app, Flask): + return app + + raise NoAppException( + "A valid Flask application was not obtained from " + '"{module}:{app_name}".'.format(module=module.__name__, app_name=app_name) + ) + + +def prepare_import(path): + """Given a filename this will try to calculate the python path, add it + to the search path and return the actual module name that is expected. + """ + path = os.path.realpath(path) + + fname, ext = os.path.splitext(path) + if ext == ".py": + path = fname + + if os.path.basename(path) == "__init__": + path = os.path.dirname(path) + + module_name = [] + + # move up until outside package structure (no __init__.py) + while True: + path, name = os.path.split(path) + module_name.append(name) + + if not os.path.exists(os.path.join(path, "__init__.py")): + break + + if sys.path[0] != path: + sys.path.insert(0, path) + + return ".".join(module_name[::-1]) + + +def locate_app(script_info, module_name, app_name, raise_if_not_found=True): + __traceback_hide__ = True # noqa: F841 + + try: + __import__(module_name) + except ImportError: + # Reraise the ImportError if it occurred within the imported module. + # Determine this by checking whether the trace has a depth > 1. 
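+        # sys.exc_info()[-1] is the traceback of the ImportError caught
+        # above. Its first frame is the __import__ call in this function,
+        # so a non-None tb_next means the module itself was found but
+        # failed while executing; in that case the full traceback is
+        # embedded in the NoAppException message.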
+ if sys.exc_info()[-1].tb_next: + raise NoAppException( + 'While importing "{name}", an ImportError was raised:' + "\n\n{tb}".format(name=module_name, tb=traceback.format_exc()) + ) + elif raise_if_not_found: + raise NoAppException('Could not import "{name}".'.format(name=module_name)) + else: + return + + module = sys.modules[module_name] + + if app_name is None: + return find_best_app(script_info, module) + else: + return find_app_by_string(script_info, module, app_name) + + +def get_version(ctx, param, value): + if not value or ctx.resilient_parsing: + return + + import werkzeug + from . import __version__ + + message = "Python %(python)s\nFlask %(flask)s\nWerkzeug %(werkzeug)s" + click.echo( + message + % { + "python": platform.python_version(), + "flask": __version__, + "werkzeug": werkzeug.__version__, + }, + color=ctx.color, + ) + ctx.exit() + + +version_option = click.Option( + ["--version"], + help="Show the flask version", + expose_value=False, + callback=get_version, + is_flag=True, + is_eager=True, +) + + +class DispatchingApp(object): + """Special application that dispatches to a Flask application which + is imported by name in a background thread. If an error happens + it is recorded and shown as part of the WSGI handling which in case + of the Werkzeug debugger means that it shows up in the browser. + """ + + def __init__(self, loader, use_eager_loading=False): + self.loader = loader + self._app = None + self._lock = Lock() + self._bg_loading_exc_info = None + if use_eager_loading: + self._load_unlocked() + else: + self._load_in_background() + + def _load_in_background(self): + def _load_app(): + __traceback_hide__ = True # noqa: F841 + with self._lock: + try: + self._load_unlocked() + except Exception: + self._bg_loading_exc_info = sys.exc_info() + + t = Thread(target=_load_app, args=()) + t.start() + + def _flush_bg_loading_exception(self): + __traceback_hide__ = True # noqa: F841 + exc_info = self._bg_loading_exc_info + if exc_info is not None: + self._bg_loading_exc_info = None + reraise(*exc_info) + + def _load_unlocked(self): + __traceback_hide__ = True # noqa: F841 + self._app = rv = self.loader() + self._bg_loading_exc_info = None + return rv + + def __call__(self, environ, start_response): + __traceback_hide__ = True # noqa: F841 + if self._app is not None: + return self._app(environ, start_response) + self._flush_bg_loading_exception() + with self._lock: + if self._app is not None: + rv = self._app + else: + rv = self._load_unlocked() + return rv(environ, start_response) + + +class ScriptInfo(object): + """Helper object to deal with Flask applications. This is usually not + necessary to interface with as it's used internally in the dispatching + to click. In future versions of Flask this object will most likely play + a bigger role. Typically it's created automatically by the + :class:`FlaskGroup` but you can also manually create it and pass it + onwards as click object. + """ + + def __init__(self, app_import_path=None, create_app=None, set_debug_flag=True): + #: Optionally the import path for the Flask application. + self.app_import_path = app_import_path or os.environ.get("FLASK_APP") + #: Optionally a function that is passed the script info to create + #: the instance of the application. + self.create_app = create_app + #: A dictionary with arbitrary data that can be associated with + #: this script info. 
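+        # Flask's own commands do not read self.data; it is left as a
+        # scratch space for custom CLI commands to share state across a
+        # single invocation.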
+ self.data = {} + self.set_debug_flag = set_debug_flag + self._loaded_app = None + + def load_app(self): + """Loads the Flask app (if not yet loaded) and returns it. Calling + this multiple times will just result in the already loaded app to + be returned. + """ + __traceback_hide__ = True # noqa: F841 + + if self._loaded_app is not None: + return self._loaded_app + + app = None + + if self.create_app is not None: + app = call_factory(self, self.create_app) + else: + if self.app_import_path: + path, name = ( + re.split(r":(?![\\/])", self.app_import_path, 1) + [None] + )[:2] + import_name = prepare_import(path) + app = locate_app(self, import_name, name) + else: + for path in ("wsgi.py", "app.py"): + import_name = prepare_import(path) + app = locate_app(self, import_name, None, raise_if_not_found=False) + + if app: + break + + if not app: + raise NoAppException( + "Could not locate a Flask application. You did not provide " + 'the "FLASK_APP" environment variable, and a "wsgi.py" or ' + '"app.py" module was not found in the current directory.' + ) + + if self.set_debug_flag: + # Update the app's debug flag through the descriptor so that + # other values repopulate as well. + app.debug = get_debug_flag() + + self._loaded_app = app + return app + + +pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) + + +def with_appcontext(f): + """Wraps a callback so that it's guaranteed to be executed with the + script's application context. If callbacks are registered directly + to the ``app.cli`` object then they are wrapped with this function + by default unless it's disabled. + """ + + @click.pass_context + def decorator(__ctx, *args, **kwargs): + with __ctx.ensure_object(ScriptInfo).load_app().app_context(): + return __ctx.invoke(f, *args, **kwargs) + + return update_wrapper(decorator, f) + + +class AppGroup(click.Group): + """This works similar to a regular click :class:`~click.Group` but it + changes the behavior of the :meth:`command` decorator so that it + automatically wraps the functions in :func:`with_appcontext`. + + Not to be confused with :class:`FlaskGroup`. + """ + + def command(self, *args, **kwargs): + """This works exactly like the method of the same name on a regular + :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` + unless it's disabled by passing ``with_appcontext=False``. + """ + wrap_for_ctx = kwargs.pop("with_appcontext", True) + + def decorator(f): + if wrap_for_ctx: + f = with_appcontext(f) + return click.Group.command(self, *args, **kwargs)(f) + + return decorator + + def group(self, *args, **kwargs): + """This works exactly like the method of the same name on a regular + :class:`click.Group` but it defaults the group class to + :class:`AppGroup`. + """ + kwargs.setdefault("cls", AppGroup) + return click.Group.group(self, *args, **kwargs) + + +class FlaskGroup(AppGroup): + """Special subclass of the :class:`AppGroup` group that supports + loading more commands from the configured Flask app. Normally a + developer does not have to interface with this class but there are + some very advanced use cases for which it makes sense to create an + instance of this. + + For information as of why this is useful see :ref:`custom-scripts`. + + :param add_default_commands: if this is True then the default run and + shell commands will be added. + :param add_version_option: adds the ``--version`` option. + :param create_app: an optional callback that is passed the script info and + returns the loaded app. 
+ :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` + files to set environment variables. Will also change the working + directory to the directory containing the first file found. + :param set_debug_flag: Set the app's debug flag based on the active + environment + + .. versionchanged:: 1.0 + If installed, python-dotenv will be used to load environment variables + from :file:`.env` and :file:`.flaskenv` files. + """ + + def __init__( + self, + add_default_commands=True, + create_app=None, + add_version_option=True, + load_dotenv=True, + set_debug_flag=True, + **extra + ): + params = list(extra.pop("params", None) or ()) + + if add_version_option: + params.append(version_option) + + AppGroup.__init__(self, params=params, **extra) + self.create_app = create_app + self.load_dotenv = load_dotenv + self.set_debug_flag = set_debug_flag + + if add_default_commands: + self.add_command(run_command) + self.add_command(shell_command) + self.add_command(routes_command) + + self._loaded_plugin_commands = False + + def _load_plugin_commands(self): + if self._loaded_plugin_commands: + return + try: + import pkg_resources + except ImportError: + self._loaded_plugin_commands = True + return + + for ep in pkg_resources.iter_entry_points("flask.commands"): + self.add_command(ep.load(), ep.name) + self._loaded_plugin_commands = True + + def get_command(self, ctx, name): + self._load_plugin_commands() + + # We load built-in commands first as these should always be the + # same no matter what the app does. If the app does want to + # override this it needs to make a custom instance of this group + # and not attach the default commands. + # + # This also means that the script stays functional in case the + # application completely fails. + rv = AppGroup.get_command(self, ctx, name) + if rv is not None: + return rv + + info = ctx.ensure_object(ScriptInfo) + try: + rv = info.load_app().cli.get_command(ctx, name) + if rv is not None: + return rv + except NoAppException: + pass + + def list_commands(self, ctx): + self._load_plugin_commands() + + # The commands available is the list of both the application (if + # available) plus the builtin commands. + rv = set(click.Group.list_commands(self, ctx)) + info = ctx.ensure_object(ScriptInfo) + try: + rv.update(info.load_app().cli.list_commands(ctx)) + except Exception: + # Here we intentionally swallow all exceptions as we don't + # want the help page to break if the app does not exist. + # If someone attempts to use the command we try to create + # the app again and this will give us the error. + # However, we will not do so silently because that would confuse + # users. + traceback.print_exc() + return sorted(rv) + + def main(self, *args, **kwargs): + # Set a global flag that indicates that we were invoked from the + # command line interface. This is detected by Flask.run to make the + # call into a no-op. This is necessary to avoid ugly errors when the + # script that is loaded here also attempts to start a server. + os.environ["FLASK_RUN_FROM_CLI"] = "true" + + if get_load_dotenv(self.load_dotenv): + load_dotenv() + + obj = kwargs.get("obj") + + if obj is None: + obj = ScriptInfo( + create_app=self.create_app, set_debug_flag=self.set_debug_flag + ) + + kwargs["obj"] = obj + kwargs.setdefault("auto_envvar_prefix", "FLASK") + return super(FlaskGroup, self).main(*args, **kwargs) + + +def _path_is_ancestor(path, other): + """Take ``other`` and remove the length of ``path`` from it. Then join it + to ``path``. 
If it is the original value, ``path`` is an ancestor of + ``other``.""" + return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other + + +def load_dotenv(path=None): + """Load "dotenv" files in order of precedence to set environment variables. + + If an env var is already set it is not overwritten, so earlier files in the + list are preferred over later files. + + Changes the current working directory to the location of the first file + found, with the assumption that it is in the top level project directory + and will be where the Python path should import local packages from. + + This is a no-op if `python-dotenv`_ is not installed. + + .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme + + :param path: Load the file at this location instead of searching. + :return: ``True`` if a file was loaded. + + .. versionchanged:: 1.1.0 + Returns ``False`` when python-dotenv is not installed, or when + the given path isn't a file. + + .. versionadded:: 1.0 + """ + if dotenv is None: + if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"): + click.secho( + " * Tip: There are .env or .flaskenv files present." + ' Do "pip install python-dotenv" to use them.', + fg="yellow", + err=True, + ) + + return False + + # if the given path specifies the actual file then return True, + # else False + if path is not None: + if os.path.isfile(path): + return dotenv.load_dotenv(path) + + return False + + new_dir = None + + for name in (".env", ".flaskenv"): + path = dotenv.find_dotenv(name, usecwd=True) + + if not path: + continue + + if new_dir is None: + new_dir = os.path.dirname(path) + + dotenv.load_dotenv(path) + + if new_dir and os.getcwd() != new_dir: + os.chdir(new_dir) + + return new_dir is not None # at least one file was located and loaded + + +def show_server_banner(env, debug, app_import_path, eager_loading): + """Show extra startup messages the first time the server is run, + ignoring the reloader. + """ + if os.environ.get("WERKZEUG_RUN_MAIN") == "true": + return + + if app_import_path is not None: + message = ' * Serving Flask app "{0}"'.format(app_import_path) + + if not eager_loading: + message += " (lazy loading)" + + click.echo(message) + + click.echo(" * Environment: {0}".format(env)) + + if env == "production": + click.secho( + " WARNING: This is a development server. " + "Do not use it in a production deployment.", + fg="red", + ) + click.secho(" Use a production WSGI server instead.", dim=True) + + if debug is not None: + click.echo(" * Debug mode: {0}".format("on" if debug else "off")) + + +class CertParamType(click.ParamType): + """Click option type for the ``--cert`` option. Allows either an + existing file, the string ``'adhoc'``, or an import for a + :class:`~ssl.SSLContext` object. 
+ """ + + name = "path" + + def __init__(self): + self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True) + + def convert(self, value, param, ctx): + if ssl is None: + raise click.BadParameter( + 'Using "--cert" requires Python to be compiled with SSL support.', + ctx, + param, + ) + + try: + return self.path_type(value, param, ctx) + except click.BadParameter: + value = click.STRING(value, param, ctx).lower() + + if value == "adhoc": + try: + import OpenSSL # noqa: F401 + except ImportError: + raise click.BadParameter( + "Using ad-hoc certificates requires pyOpenSSL.", ctx, param + ) + + return value + + obj = import_string(value, silent=True) + + if sys.version_info < (2, 7, 9): + if obj: + return obj + else: + if isinstance(obj, ssl.SSLContext): + return obj + + raise + + +def _validate_key(ctx, param, value): + """The ``--key`` option must be specified when ``--cert`` is a file. + Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. + """ + cert = ctx.params.get("cert") + is_adhoc = cert == "adhoc" + + if sys.version_info < (2, 7, 9): + is_context = cert and not isinstance(cert, (text_type, bytes)) + else: + is_context = isinstance(cert, ssl.SSLContext) + + if value is not None: + if is_adhoc: + raise click.BadParameter( + 'When "--cert" is "adhoc", "--key" is not used.', ctx, param + ) + + if is_context: + raise click.BadParameter( + 'When "--cert" is an SSLContext object, "--key is not used.', ctx, param + ) + + if not cert: + raise click.BadParameter('"--cert" must also be specified.', ctx, param) + + ctx.params["cert"] = cert, value + + else: + if cert and not (is_adhoc or is_context): + raise click.BadParameter('Required when using "--cert".', ctx, param) + + return value + + +class SeparatedPathType(click.Path): + """Click option type that accepts a list of values separated by the + OS's path separator (``:``, ``;`` on Windows). Each value is + validated as a :class:`click.Path` type. + """ + + def convert(self, value, param, ctx): + items = self.split_envvar_value(value) + super_convert = super(SeparatedPathType, self).convert + return [super_convert(item, param, ctx) for item in items] + + +@click.command("run", short_help="Run a development server.") +@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.") +@click.option("--port", "-p", default=5000, help="The port to bind to.") +@click.option( + "--cert", type=CertParamType(), help="Specify a certificate file to use HTTPS." +) +@click.option( + "--key", + type=click.Path(exists=True, dir_okay=False, resolve_path=True), + callback=_validate_key, + expose_value=False, + help="The key file to use when specifying a certificate.", +) +@click.option( + "--reload/--no-reload", + default=None, + help="Enable or disable the reloader. By default the reloader " + "is active if debug is enabled.", +) +@click.option( + "--debugger/--no-debugger", + default=None, + help="Enable or disable the debugger. By default the debugger " + "is active if debug is enabled.", +) +@click.option( + "--eager-loading/--lazy-loader", + default=None, + help="Enable or disable eager loading. By default eager " + "loading is enabled if the reloader is disabled.", +) +@click.option( + "--with-threads/--without-threads", + default=True, + help="Enable or disable multithreading.", +) +@click.option( + "--extra-files", + default=None, + type=SeparatedPathType(), + help=( + "Extra files that trigger a reload on change. 
Multiple paths" + " are separated by '{}'.".format(os.path.pathsep) + ), +) +@pass_script_info +def run_command( + info, host, port, reload, debugger, eager_loading, with_threads, cert, extra_files +): + """Run a local development server. + + This server is for development purposes only. It does not provide + the stability, security, or performance of production WSGI servers. + + The reloader and debugger are enabled by default if + FLASK_ENV=development or FLASK_DEBUG=1. + """ + debug = get_debug_flag() + + if reload is None: + reload = debug + + if debugger is None: + debugger = debug + + if eager_loading is None: + eager_loading = not reload + + show_server_banner(get_env(), debug, info.app_import_path, eager_loading) + app = DispatchingApp(info.load_app, use_eager_loading=eager_loading) + + from werkzeug.serving import run_simple + + run_simple( + host, + port, + app, + use_reloader=reload, + use_debugger=debugger, + threaded=with_threads, + ssl_context=cert, + extra_files=extra_files, + ) + + +@click.command("shell", short_help="Run a shell in the app context.") +@with_appcontext +def shell_command(): + """Run an interactive Python shell in the context of a given + Flask application. The application will populate the default + namespace of this shell according to it's configuration. + + This is useful for executing small snippets of management code + without having to manually configure the application. + """ + import code + from .globals import _app_ctx_stack + + app = _app_ctx_stack.top.app + banner = "Python %s on %s\nApp: %s [%s]\nInstance: %s" % ( + sys.version, + sys.platform, + app.import_name, + app.env, + app.instance_path, + ) + ctx = {} + + # Support the regular Python interpreter startup script if someone + # is using it. + startup = os.environ.get("PYTHONSTARTUP") + if startup and os.path.isfile(startup): + with open(startup, "r") as f: + eval(compile(f.read(), startup, "exec"), ctx) + + ctx.update(app.make_shell_context()) + + code.interact(banner=banner, local=ctx) + + +@click.command("routes", short_help="Show the routes for the app.") +@click.option( + "--sort", + "-s", + type=click.Choice(("endpoint", "methods", "rule", "match")), + default="endpoint", + help=( + 'Method to sort routes by. "match" is the order that Flask will match ' + "routes when dispatching a request." 
+ ), +) +@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.") +@with_appcontext +def routes_command(sort, all_methods): + """Show all registered routes with endpoints and methods.""" + + rules = list(current_app.url_map.iter_rules()) + if not rules: + click.echo("No routes were registered.") + return + + ignored_methods = set(() if all_methods else ("HEAD", "OPTIONS")) + + if sort in ("endpoint", "rule"): + rules = sorted(rules, key=attrgetter(sort)) + elif sort == "methods": + rules = sorted(rules, key=lambda rule: sorted(rule.methods)) + + rule_methods = [", ".join(sorted(rule.methods - ignored_methods)) for rule in rules] + + headers = ("Endpoint", "Methods", "Rule") + widths = ( + max(len(rule.endpoint) for rule in rules), + max(len(methods) for methods in rule_methods), + max(len(rule.rule) for rule in rules), + ) + widths = [max(len(h), w) for h, w in zip(headers, widths)] + row = "{{0:<{0}}} {{1:<{1}}} {{2:<{2}}}".format(*widths) + + click.echo(row.format(*headers).strip()) + click.echo(row.format(*("-" * width for width in widths))) + + for rule, methods in zip(rules, rule_methods): + click.echo(row.format(rule.endpoint, methods, rule.rule).rstrip()) + + +cli = FlaskGroup( + help="""\ +A general utility script for Flask applications. + +Provides commands from Flask, extensions, and the application. Loads the +application defined in the FLASK_APP environment variable, or from a wsgi.py +file. Setting the FLASK_ENV environment variable to 'development' will enable +debug mode. + +\b + {prefix}{cmd} FLASK_APP=hello.py + {prefix}{cmd} FLASK_ENV=development + {prefix}flask run +""".format( + cmd="export" if os.name == "posix" else "set", + prefix="$ " if os.name == "posix" else "> ", + ) +) + + +def main(as_module=False): + cli.main(prog_name="python -m flask" if as_module else None) + + +if __name__ == "__main__": + main(as_module=True) diff --git a/py3/lib/python3.6/site-packages/flask/config.py b/py3/lib/python3.6/site-packages/flask/config.py new file mode 100644 index 0000000..809de33 --- /dev/null +++ b/py3/lib/python3.6/site-packages/flask/config.py @@ -0,0 +1,269 @@ +# -*- coding: utf-8 -*- +""" + flask.config + ~~~~~~~~~~~~ + + Implements the configuration related objects. + + :copyright: 2010 Pallets + :license: BSD-3-Clause +""" +import errno +import os +import types + +from werkzeug.utils import import_string + +from . import json +from ._compat import iteritems +from ._compat import string_types + + +class ConfigAttribute(object): + """Makes an attribute forward to the config""" + + def __init__(self, name, get_converter=None): + self.__name__ = name + self.get_converter = get_converter + + def __get__(self, obj, type=None): + if obj is None: + return self + rv = obj.config[self.__name__] + if self.get_converter is not None: + rv = self.get_converter(rv) + return rv + + def __set__(self, obj, value): + obj.config[self.__name__] = value + + +class Config(dict): + """Works exactly like a dict but provides ways to fill it from files + or special dictionaries. There are two common patterns to populate the + config. + + Either you can fill the config from a config file:: + + app.config.from_pyfile('yourconfig.cfg') + + Or alternatively you can define the configuration options in the + module that calls :meth:`from_object` or provide an import path to + a module that should be loaded. 
It is also possible to tell it to + use the same module and with that provide the configuration values + just before the call:: + + DEBUG = True + SECRET_KEY = 'development key' + app.config.from_object(__name__) + + In both cases (loading from any Python file or loading from modules), + only uppercase keys are added to the config. This makes it possible to use + lowercase values in the config file for temporary values that are not added + to the config or to define the config keys in the same file that implements + the application. + + Probably the most interesting way to load configurations is from an + environment variable pointing to a file:: + + app.config.from_envvar('YOURAPPLICATION_SETTINGS') + + In this case before launching the application you have to set this + environment variable to the file you want to use. On Linux and OS X + use the export statement:: + + export YOURAPPLICATION_SETTINGS='/path/to/config/file' + + On windows use `set` instead. + + :param root_path: path to which files are read relative from. When the + config object is created by the application, this is + the application's :attr:`~flask.Flask.root_path`. + :param defaults: an optional dictionary of default values + """ + + def __init__(self, root_path, defaults=None): + dict.__init__(self, defaults or {}) + self.root_path = root_path + + def from_envvar(self, variable_name, silent=False): + """Loads a configuration from an environment variable pointing to + a configuration file. This is basically just a shortcut with nicer + error messages for this line of code:: + + app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) + + :param variable_name: name of the environment variable + :param silent: set to ``True`` if you want silent failure for missing + files. + :return: bool. ``True`` if able to load config, ``False`` otherwise. + """ + rv = os.environ.get(variable_name) + if not rv: + if silent: + return False + raise RuntimeError( + "The environment variable %r is not set " + "and as such configuration could not be " + "loaded. Set this variable and make it " + "point to a configuration file" % variable_name + ) + return self.from_pyfile(rv, silent=silent) + + def from_pyfile(self, filename, silent=False): + """Updates the values in the config from a Python file. This function + behaves as if the file was imported as module with the + :meth:`from_object` function. + + :param filename: the filename of the config. This can either be an + absolute filename or a filename relative to the + root path. + :param silent: set to ``True`` if you want silent failure for missing + files. + + .. versionadded:: 0.7 + `silent` parameter. + """ + filename = os.path.join(self.root_path, filename) + d = types.ModuleType("config") + d.__file__ = filename + try: + with open(filename, mode="rb") as config_file: + exec(compile(config_file.read(), filename, "exec"), d.__dict__) + except IOError as e: + if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR): + return False + e.strerror = "Unable to load configuration file (%s)" % e.strerror + raise + self.from_object(d) + return True + + def from_object(self, obj): + """Updates the values from the given object. An object can be of one + of the following two types: + + - a string: in this case the object with that name will be imported + - an actual object reference: that object is used directly + + Objects are usually either modules or classes. :meth:`from_object` + loads only the uppercase attributes of the module/class. 
A ``dict`` + object will not work with :meth:`from_object` because the keys of a + ``dict`` are not attributes of the ``dict`` class. + + Example of module-based configuration:: + + app.config.from_object('yourapplication.default_config') + from yourapplication import default_config + app.config.from_object(default_config) + + Nothing is done to the object before loading. If the object is a + class and has ``@property`` attributes, it needs to be + instantiated before being passed to this method. + + You should not use this function to load the actual configuration but + rather configuration defaults. The actual config should be loaded + with :meth:`from_pyfile` and ideally from a location not within the + package because the package might be installed system wide. + + See :ref:`config-dev-prod` for an example of class-based configuration + using :meth:`from_object`. + + :param obj: an import name or object + """ + if isinstance(obj, string_types): + obj = import_string(obj) + for key in dir(obj): + if key.isupper(): + self[key] = getattr(obj, key) + + def from_json(self, filename, silent=False): + """Updates the values in the config from a JSON file. This function + behaves as if the JSON object was a dictionary and passed to the + :meth:`from_mapping` function. + + :param filename: the filename of the JSON file. This can either be an + absolute filename or a filename relative to the + root path. + :param silent: set to ``True`` if you want silent failure for missing + files. + + .. versionadded:: 0.11 + """ + filename = os.path.join(self.root_path, filename) + + try: + with open(filename) as json_file: + obj = json.loads(json_file.read()) + except IOError as e: + if silent and e.errno in (errno.ENOENT, errno.EISDIR): + return False + e.strerror = "Unable to load configuration file (%s)" % e.strerror + raise + return self.from_mapping(obj) + + def from_mapping(self, *mapping, **kwargs): + """Updates the config like :meth:`update` ignoring items with non-upper + keys. + + .. versionadded:: 0.11 + """ + mappings = [] + if len(mapping) == 1: + if hasattr(mapping[0], "items"): + mappings.append(mapping[0].items()) + else: + mappings.append(mapping[0]) + elif len(mapping) > 1: + raise TypeError( + "expected at most 1 positional argument, got %d" % len(mapping) + ) + mappings.append(kwargs.items()) + for mapping in mappings: + for (key, value) in mapping: + if key.isupper(): + self[key] = value + return True + + def get_namespace(self, namespace, lowercase=True, trim_namespace=True): + """Returns a dictionary containing a subset of configuration options + that match the specified namespace/prefix. Example usage:: + + app.config['IMAGE_STORE_TYPE'] = 'fs' + app.config['IMAGE_STORE_PATH'] = '/var/app/images' + app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' + image_store_config = app.config.get_namespace('IMAGE_STORE_') + + The resulting dictionary `image_store_config` would look like:: + + { + 'type': 'fs', + 'path': '/var/app/images', + 'base_url': 'http://img.website.com' + } + + This is often useful when configuration options map directly to + keyword arguments in functions or class constructors. + + :param namespace: a configuration namespace + :param lowercase: a flag indicating if the keys of the resulting + dictionary should be lowercase + :param trim_namespace: a flag indicating if the keys of the resulting + dictionary should not include the namespace + + .. 
versionadded:: 0.11 + """ + rv = {} + for k, v in iteritems(self): + if not k.startswith(namespace): + continue + if trim_namespace: + key = k[len(namespace) :] + else: + key = k + if lowercase: + key = key.lower() + rv[key] = v + return rv + + def __repr__(self): + return "<%s %s>" % (self.__class__.__name__, dict.__repr__(self)) diff --git a/py3/lib/python3.6/site-packages/flask/ctx.py b/py3/lib/python3.6/site-packages/flask/ctx.py new file mode 100644 index 0000000..172f6a0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/flask/ctx.py @@ -0,0 +1,475 @@ +# -*- coding: utf-8 -*- +""" + flask.ctx + ~~~~~~~~~ + + Implements the objects required to keep the context. + + :copyright: 2010 Pallets + :license: BSD-3-Clause +""" +import sys +from functools import update_wrapper + +from werkzeug.exceptions import HTTPException + +from ._compat import BROKEN_PYPY_CTXMGR_EXIT +from ._compat import reraise +from .globals import _app_ctx_stack +from .globals import _request_ctx_stack +from .signals import appcontext_popped +from .signals import appcontext_pushed + + +# a singleton sentinel value for parameter defaults +_sentinel = object() + + +class _AppCtxGlobals(object): + """A plain object. Used as a namespace for storing data during an + application context. + + Creating an app context automatically creates this object, which is + made available as the :data:`g` proxy. + + .. describe:: 'key' in g + + Check whether an attribute is present. + + .. versionadded:: 0.10 + + .. describe:: iter(g) + + Return an iterator over the attribute names. + + .. versionadded:: 0.10 + """ + + def get(self, name, default=None): + """Get an attribute by name, or a default value. Like + :meth:`dict.get`. + + :param name: Name of attribute to get. + :param default: Value to return if the attribute is not present. + + .. versionadded:: 0.10 + """ + return self.__dict__.get(name, default) + + def pop(self, name, default=_sentinel): + """Get and remove an attribute by name. Like :meth:`dict.pop`. + + :param name: Name of attribute to pop. + :param default: Value to return if the attribute is not present, + instead of raise a ``KeyError``. + + .. versionadded:: 0.11 + """ + if default is _sentinel: + return self.__dict__.pop(name) + else: + return self.__dict__.pop(name, default) + + def setdefault(self, name, default=None): + """Get the value of an attribute if it is present, otherwise + set and return a default value. Like :meth:`dict.setdefault`. + + :param name: Name of attribute to get. + :param: default: Value to set and return if the attribute is not + present. + + .. versionadded:: 0.11 + """ + return self.__dict__.setdefault(name, default) + + def __contains__(self, item): + return item in self.__dict__ + + def __iter__(self): + return iter(self.__dict__) + + def __repr__(self): + top = _app_ctx_stack.top + if top is not None: + return "" % top.app.name + return object.__repr__(self) + + +def after_this_request(f): + """Executes a function after this request. This is useful to modify + response objects. The function is passed the response object and has + to return the same or a new one. + + Example:: + + @app.route('/') + def index(): + @after_this_request + def add_header(response): + response.headers['X-Foo'] = 'Parachute' + return response + return 'Hello World!' + + This is more useful if a function other than the view function wants to + modify a response. For instance think of a decorator that wants to add + some headers without converting the return value into a response object. + + .. 
versionadded:: 0.9 + """ + _request_ctx_stack.top._after_request_functions.append(f) + return f + + +def copy_current_request_context(f): + """A helper function that decorates a function to retain the current + request context. This is useful when working with greenlets. The moment + the function is decorated a copy of the request context is created and + then pushed when the function is called. The current session is also + included in the copied request context. + + Example:: + + import gevent + from flask import copy_current_request_context + + @app.route('/') + def index(): + @copy_current_request_context + def do_some_work(): + # do some work here, it can access flask.request or + # flask.session like you would otherwise in the view function. + ... + gevent.spawn(do_some_work) + return 'Regular response' + + .. versionadded:: 0.10 + """ + top = _request_ctx_stack.top + if top is None: + raise RuntimeError( + "This decorator can only be used at local scopes " + "when a request context is on the stack. For instance within " + "view functions." + ) + reqctx = top.copy() + + def wrapper(*args, **kwargs): + with reqctx: + return f(*args, **kwargs) + + return update_wrapper(wrapper, f) + + +def has_request_context(): + """If you have code that wants to test if a request context is there or + not this function can be used. For instance, you may want to take advantage + of request information if the request object is available, but fail + silently if it is unavailable. + + :: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and has_request_context(): + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + Alternatively you can also just test any of the context bound objects + (such as :class:`request` or :class:`g`) for truthness:: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and request: + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + .. versionadded:: 0.7 + """ + return _request_ctx_stack.top is not None + + +def has_app_context(): + """Works like :func:`has_request_context` but for the application + context. You can also just do a boolean check on the + :data:`current_app` object instead. + + .. versionadded:: 0.9 + """ + return _app_ctx_stack.top is not None + + +class AppContext(object): + """The application context binds an application object implicitly + to the current thread or greenlet, similar to how the + :class:`RequestContext` binds request information. The application + context is also implicitly created if a request context is created + but the application is not on top of the individual application + context. + """ + + def __init__(self, app): + self.app = app + self.url_adapter = app.create_url_adapter(None) + self.g = app.app_ctx_globals_class() + + # Like request context, app contexts can be pushed multiple times + # but there a basic "refcount" is enough to track them. 
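+        # push() increments this counter and pop() decrements it; the
+        # teardown_appcontext callbacks only run once it drops back to
+        # zero, so the same context object can be pushed in a nested
+        # fashion without triggering teardown early.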
+ self._refcnt = 0 + + def push(self): + """Binds the app context to the current context.""" + self._refcnt += 1 + if hasattr(sys, "exc_clear"): + sys.exc_clear() + _app_ctx_stack.push(self) + appcontext_pushed.send(self.app) + + def pop(self, exc=_sentinel): + """Pops the app context.""" + try: + self._refcnt -= 1 + if self._refcnt <= 0: + if exc is _sentinel: + exc = sys.exc_info()[1] + self.app.do_teardown_appcontext(exc) + finally: + rv = _app_ctx_stack.pop() + assert rv is self, "Popped wrong app context. (%r instead of %r)" % (rv, self) + appcontext_popped.send(self.app) + + def __enter__(self): + self.push() + return self + + def __exit__(self, exc_type, exc_value, tb): + self.pop(exc_value) + + if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None: + reraise(exc_type, exc_value, tb) + + +class RequestContext(object): + """The request context contains all request relevant information. It is + created at the beginning of the request and pushed to the + `_request_ctx_stack` and removed at the end of it. It will create the + URL adapter and request object for the WSGI environment provided. + + Do not attempt to use this class directly, instead use + :meth:`~flask.Flask.test_request_context` and + :meth:`~flask.Flask.request_context` to create this object. + + When the request context is popped, it will evaluate all the + functions registered on the application for teardown execution + (:meth:`~flask.Flask.teardown_request`). + + The request context is automatically popped at the end of the request + for you. In debug mode the request context is kept around if + exceptions happen so that interactive debuggers have a chance to + introspect the data. With 0.4 this can also be forced for requests + that did not fail and outside of ``DEBUG`` mode. By setting + ``'flask._preserve_context'`` to ``True`` on the WSGI environment the + context will not pop itself at the end of the request. This is used by + the :meth:`~flask.Flask.test_client` for example to implement the + deferred cleanup functionality. + + You might find this helpful for unittests where you need the + information from the context local around for a little longer. Make + sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in + that situation, otherwise your unittests will leak memory. + """ + + def __init__(self, app, environ, request=None, session=None): + self.app = app + if request is None: + request = app.request_class(environ) + self.request = request + self.url_adapter = None + try: + self.url_adapter = app.create_url_adapter(self.request) + except HTTPException as e: + self.request.routing_exception = e + self.flashes = None + self.session = session + + # Request contexts can be pushed multiple times and interleaved with + # other request contexts. Now only if the last level is popped we + # get rid of them. Additionally if an application context is missing + # one is created implicitly so for each level we add this information + self._implicit_app_ctx_stack = [] + + # indicator if the context was preserved. Next time another context + # is pushed the preserved context is popped. + self.preserved = False + + # remembers the exception for pop if there is one in case the context + # preservation kicks in. + self._preserved_exc = None + + # Functions that should be executed after the request on the response + # object. These will be called before the regular "after_request" + # functions. 
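+        # This list is populated by the after_this_request() helper defined
+        # earlier in this module; each entry is called with the response
+        # before the blueprint- and app-level after_request functions run.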
+ self._after_request_functions = [] + + @property + def g(self): + return _app_ctx_stack.top.g + + @g.setter + def g(self, value): + _app_ctx_stack.top.g = value + + def copy(self): + """Creates a copy of this request context with the same request object. + This can be used to move a request context to a different greenlet. + Because the actual request object is the same this cannot be used to + move a request context to a different thread unless access to the + request object is locked. + + .. versionadded:: 0.10 + + .. versionchanged:: 1.1 + The current session object is used instead of reloading the original + data. This prevents `flask.session` pointing to an out-of-date object. + """ + return self.__class__( + self.app, + environ=self.request.environ, + request=self.request, + session=self.session, + ) + + def match_request(self): + """Can be overridden by a subclass to hook into the matching + of the request. + """ + try: + result = self.url_adapter.match(return_rule=True) + self.request.url_rule, self.request.view_args = result + except HTTPException as e: + self.request.routing_exception = e + + def push(self): + """Binds the request context to the current context.""" + # If an exception occurs in debug mode or if context preservation is + # activated under exception situations exactly one context stays + # on the stack. The rationale is that you want to access that + # information under debug situations. However if someone forgets to + # pop that context again we want to make sure that on the next push + # it's invalidated, otherwise we run at risk that something leaks + # memory. This is usually only a problem in test suite since this + # functionality is not active in production environments. + top = _request_ctx_stack.top + if top is not None and top.preserved: + top.pop(top._preserved_exc) + + # Before we push the request context we have to ensure that there + # is an application context. + app_ctx = _app_ctx_stack.top + if app_ctx is None or app_ctx.app != self.app: + app_ctx = self.app.app_context() + app_ctx.push() + self._implicit_app_ctx_stack.append(app_ctx) + else: + self._implicit_app_ctx_stack.append(None) + + if hasattr(sys, "exc_clear"): + sys.exc_clear() + + _request_ctx_stack.push(self) + + # Open the session at the moment that the request context is available. + # This allows a custom open_session method to use the request context. + # Only open a new session if this is the first time the request was + # pushed, otherwise stream_with_context loses the session. + if self.session is None: + session_interface = self.app.session_interface + self.session = session_interface.open_session(self.app, self.request) + + if self.session is None: + self.session = session_interface.make_null_session(self.app) + + if self.url_adapter is not None: + self.match_request() + + def pop(self, exc=_sentinel): + """Pops the request context and unbinds it by doing that. This will + also trigger the execution of functions registered by the + :meth:`~flask.Flask.teardown_request` decorator. + + .. versionchanged:: 0.9 + Added the `exc` argument. + """ + app_ctx = self._implicit_app_ctx_stack.pop() + + try: + clear_request = False + if not self._implicit_app_ctx_stack: + self.preserved = False + self._preserved_exc = None + if exc is _sentinel: + exc = sys.exc_info()[1] + self.app.do_teardown_request(exc) + + # If this interpreter supports clearing the exception information + # we do that now. 
This will only go into effect on Python 2.x, + # on 3.x it disappears automatically at the end of the exception + # stack. + if hasattr(sys, "exc_clear"): + sys.exc_clear() + + request_close = getattr(self.request, "close", None) + if request_close is not None: + request_close() + clear_request = True + finally: + rv = _request_ctx_stack.pop() + + # get rid of circular dependencies at the end of the request + # so that we don't require the GC to be active. + if clear_request: + rv.request.environ["werkzeug.request"] = None + + # Get rid of the app as well if necessary. + if app_ctx is not None: + app_ctx.pop(exc) + + assert rv is self, "Popped wrong request context. (%r instead of %r)" % ( + rv, + self, + ) + + def auto_pop(self, exc): + if self.request.environ.get("flask._preserve_context") or ( + exc is not None and self.app.preserve_context_on_exception + ): + self.preserved = True + self._preserved_exc = exc + else: + self.pop(exc) + + def __enter__(self): + self.push() + return self + + def __exit__(self, exc_type, exc_value, tb): + # do not pop the request stack if we are in debug mode and an + # exception happened. This will allow the debugger to still + # access the request object in the interactive shell. Furthermore + # the context can be force kept alive for the test client. + # See flask.testing for how this works. + self.auto_pop(exc_value) + + if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None: + reraise(exc_type, exc_value, tb) + + def __repr__(self): + return "<%s '%s' [%s] of %s>" % ( + self.__class__.__name__, + self.request.url, + self.request.method, + self.app.name, + ) diff --git a/py3/lib/python3.6/site-packages/flask/debughelpers.py b/py3/lib/python3.6/site-packages/flask/debughelpers.py new file mode 100644 index 0000000..e475bd1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/flask/debughelpers.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +""" + flask.debughelpers + ~~~~~~~~~~~~~~~~~~ + + Various helpers to make the development experience better. + + :copyright: 2010 Pallets + :license: BSD-3-Clause +""" +import os +from warnings import warn + +from ._compat import implements_to_string +from ._compat import text_type +from .app import Flask +from .blueprints import Blueprint +from .globals import _request_ctx_stack + + +class UnexpectedUnicodeError(AssertionError, UnicodeError): + """Raised in places where we want some better error reporting for + unexpected unicode or binary data. + """ + + +@implements_to_string +class DebugFilesKeyError(KeyError, AssertionError): + """Raised from request.files during debugging. The idea is that it can + provide a better error message than just a generic KeyError/BadRequest. + """ + + def __init__(self, request, key): + form_matches = request.form.getlist(key) + buf = [ + 'You tried to access the file "%s" in the request.files ' + "dictionary but it does not exist. The mimetype for the request " + 'is "%s" instead of "multipart/form-data" which means that no ' + "file contents were transmitted. To fix this error you should " + 'provide enctype="multipart/form-data" in your form.' + % (key, request.mimetype) + ] + if form_matches: + buf.append( + "\n\nThe browser instead transmitted some file names. 
" + "This was submitted: %s" % ", ".join('"%s"' % x for x in form_matches) + ) + self.msg = "".join(buf) + + def __str__(self): + return self.msg + + +class FormDataRoutingRedirect(AssertionError): + """This exception is raised by Flask in debug mode if it detects a + redirect caused by the routing system when the request method is not + GET, HEAD or OPTIONS. Reasoning: form data will be dropped. + """ + + def __init__(self, request): + exc = request.routing_exception + buf = [ + "A request was sent to this URL (%s) but a redirect was " + 'issued automatically by the routing system to "%s".' + % (request.url, exc.new_url) + ] + + # In case just a slash was appended we can be extra helpful + if request.base_url + "/" == exc.new_url.split("?")[0]: + buf.append( + " The URL was defined with a trailing slash so " + "Flask will automatically redirect to the URL " + "with the trailing slash if it was accessed " + "without one." + ) + + buf.append( + " Make sure to directly send your %s-request to this URL " + "since we can't make browsers or HTTP clients redirect " + "with form data reliably or without user interaction." % request.method + ) + buf.append("\n\nNote: this exception is only raised in debug mode") + AssertionError.__init__(self, "".join(buf).encode("utf-8")) + + +def attach_enctype_error_multidict(request): + """Since Flask 0.8 we're monkeypatching the files object in case a + request is detected that does not use multipart form data but the files + object is accessed. + """ + oldcls = request.files.__class__ + + class newcls(oldcls): + def __getitem__(self, key): + try: + return oldcls.__getitem__(self, key) + except KeyError: + if key not in request.form: + raise + raise DebugFilesKeyError(request, key) + + newcls.__name__ = oldcls.__name__ + newcls.__module__ = oldcls.__module__ + request.files.__class__ = newcls + + +def _dump_loader_info(loader): + yield "class: %s.%s" % (type(loader).__module__, type(loader).__name__) + for key, value in sorted(loader.__dict__.items()): + if key.startswith("_"): + continue + if isinstance(value, (tuple, list)): + if not all(isinstance(x, (str, text_type)) for x in value): + continue + yield "%s:" % key + for item in value: + yield " - %s" % item + continue + elif not isinstance(value, (str, text_type, int, float, bool)): + continue + yield "%s: %r" % (key, value) + + +def explain_template_loading_attempts(app, template, attempts): + """This should help developers understand what failed""" + info = ['Locating template "%s":' % template] + total_found = 0 + blueprint = None + reqctx = _request_ctx_stack.top + if reqctx is not None and reqctx.request.blueprint is not None: + blueprint = reqctx.request.blueprint + + for idx, (loader, srcobj, triple) in enumerate(attempts): + if isinstance(srcobj, Flask): + src_info = 'application "%s"' % srcobj.import_name + elif isinstance(srcobj, Blueprint): + src_info = 'blueprint "%s" (%s)' % (srcobj.name, srcobj.import_name) + else: + src_info = repr(srcobj) + + info.append("% 5d: trying loader of %s" % (idx + 1, src_info)) + + for line in _dump_loader_info(loader): + info.append(" %s" % line) + + if triple is None: + detail = "no match" + else: + detail = "found (%r)" % (triple[1] or "") + total_found += 1 + info.append(" -> %s" % detail) + + seems_fishy = False + if total_found == 0: + info.append("Error: the template could not be found.") + seems_fishy = True + elif total_found > 1: + info.append("Warning: multiple loaders returned a match for the template.") + seems_fishy = True + + if blueprint is not 
None and seems_fishy: + info.append( + " The template was looked up from an endpoint that " + 'belongs to the blueprint "%s".' % blueprint + ) + info.append(" Maybe you did not place a template in the right folder?") + info.append(" See http://flask.pocoo.org/docs/blueprints/#templates") + + app.logger.info("\n".join(info)) + + +def explain_ignored_app_run(): + if os.environ.get("WERKZEUG_RUN_MAIN") != "true": + warn( + Warning( + "Silently ignoring app.run() because the " + "application is run from the flask command line " + "executable. Consider putting app.run() behind an " + 'if __name__ == "__main__" guard to silence this ' + "warning." + ), + stacklevel=3, + ) diff --git a/py3/lib/python3.6/site-packages/flask/globals.py b/py3/lib/python3.6/site-packages/flask/globals.py new file mode 100644 index 0000000..6d32dcf --- /dev/null +++ b/py3/lib/python3.6/site-packages/flask/globals.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +""" + flask.globals + ~~~~~~~~~~~~~ + + Defines all the global objects that are proxies to the current + active context. + + :copyright: 2010 Pallets + :license: BSD-3-Clause +""" +from functools import partial + +from werkzeug.local import LocalProxy +from werkzeug.local import LocalStack + + +_request_ctx_err_msg = """\ +Working outside of request context. + +This typically means that you attempted to use functionality that needed +an active HTTP request. Consult the documentation on testing for +information about how to avoid this problem.\ +""" +_app_ctx_err_msg = """\ +Working outside of application context. + +This typically means that you attempted to use functionality that needed +to interface with the current application object in some way. To solve +this, set up an application context with app.app_context(). See the +documentation for more information.\ +""" + + +def _lookup_req_object(name): + top = _request_ctx_stack.top + if top is None: + raise RuntimeError(_request_ctx_err_msg) + return getattr(top, name) + + +def _lookup_app_object(name): + top = _app_ctx_stack.top + if top is None: + raise RuntimeError(_app_ctx_err_msg) + return getattr(top, name) + + +def _find_app(): + top = _app_ctx_stack.top + if top is None: + raise RuntimeError(_app_ctx_err_msg) + return top.app + + +# context locals +_request_ctx_stack = LocalStack() +_app_ctx_stack = LocalStack() +current_app = LocalProxy(_find_app) +request = LocalProxy(partial(_lookup_req_object, "request")) +session = LocalProxy(partial(_lookup_req_object, "session")) +g = LocalProxy(partial(_lookup_app_object, "g")) diff --git a/py3/lib/python3.6/site-packages/flask/helpers.py b/py3/lib/python3.6/site-packages/flask/helpers.py new file mode 100644 index 0000000..3f401a5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/flask/helpers.py @@ -0,0 +1,1153 @@ +# -*- coding: utf-8 -*- +""" + flask.helpers + ~~~~~~~~~~~~~ + + Implements various helpers. 
+ + :copyright: 2010 Pallets + :license: BSD-3-Clause +""" +import io +import mimetypes +import os +import pkgutil +import posixpath +import socket +import sys +import unicodedata +from functools import update_wrapper +from threading import RLock +from time import time +from zlib import adler32 + +from jinja2 import FileSystemLoader +from werkzeug.datastructures import Headers +from werkzeug.exceptions import BadRequest +from werkzeug.exceptions import NotFound +from werkzeug.exceptions import RequestedRangeNotSatisfiable +from werkzeug.routing import BuildError +from werkzeug.urls import url_quote +from werkzeug.wsgi import wrap_file + +from ._compat import fspath +from ._compat import PY2 +from ._compat import string_types +from ._compat import text_type +from .globals import _app_ctx_stack +from .globals import _request_ctx_stack +from .globals import current_app +from .globals import request +from .globals import session +from .signals import message_flashed + +# sentinel +_missing = object() + + +# what separators does this operating system provide that are not a slash? +# this is used by the send_from_directory function to ensure that nobody is +# able to access files from outside the filesystem. +_os_alt_seps = list( + sep for sep in [os.path.sep, os.path.altsep] if sep not in (None, "/") +) + + +def get_env(): + """Get the environment the app is running in, indicated by the + :envvar:`FLASK_ENV` environment variable. The default is + ``'production'``. + """ + return os.environ.get("FLASK_ENV") or "production" + + +def get_debug_flag(): + """Get whether debug mode should be enabled for the app, indicated + by the :envvar:`FLASK_DEBUG` environment variable. The default is + ``True`` if :func:`.get_env` returns ``'development'``, or ``False`` + otherwise. + """ + val = os.environ.get("FLASK_DEBUG") + + if not val: + return get_env() == "development" + + return val.lower() not in ("0", "false", "no") + + +def get_load_dotenv(default=True): + """Get whether the user has disabled loading dotenv files by setting + :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load the + files. + + :param default: What to return if the env var isn't set. + """ + val = os.environ.get("FLASK_SKIP_DOTENV") + + if not val: + return default + + return val.lower() in ("0", "false", "no") + + +def _endpoint_from_view_func(view_func): + """Internal helper that returns the default endpoint for a given + function. This always is the function name. + """ + assert view_func is not None, "expected view func if endpoint is not provided." + return view_func.__name__ + + +def stream_with_context(generator_or_function): + """Request contexts disappear when the response is started on the server. + This is done for efficiency reasons and to make it less likely to encounter + memory leaks with badly written WSGI middlewares. The downside is that if + you are using streamed responses, the generator cannot access request bound + information any more. + + This function however can help you keep the context around for longer:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + @stream_with_context + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(generate()) + + Alternatively it can also be used around a specific generator:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' 
+ return Response(stream_with_context(generate())) + + .. versionadded:: 0.9 + """ + try: + gen = iter(generator_or_function) + except TypeError: + + def decorator(*args, **kwargs): + gen = generator_or_function(*args, **kwargs) + return stream_with_context(gen) + + return update_wrapper(decorator, generator_or_function) + + def generator(): + ctx = _request_ctx_stack.top + if ctx is None: + raise RuntimeError( + "Attempted to stream with context but " + "there was no context in the first place to keep around." + ) + with ctx: + # Dummy sentinel. Has to be inside the context block or we're + # not actually keeping the context around. + yield None + + # The try/finally is here so that if someone passes a WSGI level + # iterator in we're still running the cleanup logic. Generators + # don't need that because they are closed on their destruction + # automatically. + try: + for item in gen: + yield item + finally: + if hasattr(gen, "close"): + gen.close() + + # The trick is to start the generator. Then the code execution runs until + # the first dummy None is yielded at which point the context was already + # pushed. This item is discarded. Then when the iteration continues the + # real generator is executed. + wrapped_g = generator() + next(wrapped_g) + return wrapped_g + + +def make_response(*args): + """Sometimes it is necessary to set additional headers in a view. Because + views do not have to return response objects but can return a value that + is converted into a response object by Flask itself, it becomes tricky to + add headers to it. This function can be called instead of using a return + and you will get a response object which you can use to attach headers. + + If view looked like this and you want to add a new header:: + + def index(): + return render_template('index.html', foo=42) + + You can now do something like this:: + + def index(): + response = make_response(render_template('index.html', foo=42)) + response.headers['X-Parachutes'] = 'parachutes are cool' + return response + + This function accepts the very same arguments you can return from a + view function. This for example creates a response with a 404 error + code:: + + response = make_response(render_template('not_found.html'), 404) + + The other use case of this function is to force the return value of a + view function into a response which is helpful with view + decorators:: + + response = make_response(view_function()) + response.headers['X-Parachutes'] = 'parachutes are cool' + + Internally this function does the following things: + + - if no arguments are passed, it creates a new response argument + - if one argument is passed, :meth:`flask.Flask.make_response` + is invoked with it. + - if more than one argument is passed, the arguments are passed + to the :meth:`flask.Flask.make_response` function as tuple. + + .. versionadded:: 0.6 + """ + if not args: + return current_app.response_class() + if len(args) == 1: + args = args[0] + return current_app.make_response(args) + + +def url_for(endpoint, **values): + """Generates a URL to the given endpoint with the method provided. + + Variable arguments that are unknown to the target endpoint are appended + to the generated URL as query arguments. If the value of a query argument + is ``None``, the whole pair is skipped. In case blueprints are active + you can shortcut references to the same blueprint by prefixing the + local endpoint with a dot (``.``). 
+ + This will reference the index function local to the current blueprint:: + + url_for('.index') + + For more information, head over to the :ref:`Quickstart `. + + Configuration values ``APPLICATION_ROOT`` and ``SERVER_NAME`` are only used when + generating URLs outside of a request context. + + To integrate applications, :class:`Flask` has a hook to intercept URL build + errors through :attr:`Flask.url_build_error_handlers`. The `url_for` + function results in a :exc:`~werkzeug.routing.BuildError` when the current + app does not have a URL for the given endpoint and values. When it does, the + :data:`~flask.current_app` calls its :attr:`~Flask.url_build_error_handlers` if + it is not ``None``, which can return a string to use as the result of + `url_for` (instead of `url_for`'s default to raise the + :exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception. + An example:: + + def external_url_handler(error, endpoint, values): + "Looks up an external URL when `url_for` cannot build a URL." + # This is an example of hooking the build_error_handler. + # Here, lookup_url is some utility function you've built + # which looks up the endpoint in some external URL registry. + url = lookup_url(endpoint, **values) + if url is None: + # External lookup did not have a URL. + # Re-raise the BuildError, in context of original traceback. + exc_type, exc_value, tb = sys.exc_info() + if exc_value is error: + raise exc_type, exc_value, tb + else: + raise error + # url_for will use this result, instead of raising BuildError. + return url + + app.url_build_error_handlers.append(external_url_handler) + + Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and + `endpoint` and `values` are the arguments passed into `url_for`. Note + that this is for building URLs outside the current application, and not for + handling 404 NotFound errors. + + .. versionadded:: 0.10 + The `_scheme` parameter was added. + + .. versionadded:: 0.9 + The `_anchor` and `_method` parameters were added. + + .. versionadded:: 0.9 + Calls :meth:`Flask.handle_build_error` on + :exc:`~werkzeug.routing.BuildError`. + + :param endpoint: the endpoint of the URL (name of the function) + :param values: the variable arguments of the URL rule + :param _external: if set to ``True``, an absolute URL is generated. Server + address can be changed via ``SERVER_NAME`` configuration variable which + falls back to the `Host` header, then to the IP and port of the request. + :param _scheme: a string specifying the desired URL scheme. The `_external` + parameter must be set to ``True`` or a :exc:`ValueError` is raised. The default + behavior uses the same scheme as the current request, or + ``PREFERRED_URL_SCHEME`` from the :ref:`app configuration ` if no + request context is available. As of Werkzeug 0.10, this also can be set + to an empty string to build protocol-relative URLs. + :param _anchor: if provided this is added as anchor to the URL. + :param _method: if provided this explicitly specifies an HTTP method. + """ + appctx = _app_ctx_stack.top + reqctx = _request_ctx_stack.top + + if appctx is None: + raise RuntimeError( + "Attempted to generate a URL without the application context being" + " pushed. This has to be executed when application context is" + " available." + ) + + # If request specific information is available we have some extra + # features that support "relative" URLs. 
+ if reqctx is not None: + url_adapter = reqctx.url_adapter + blueprint_name = request.blueprint + + if endpoint[:1] == ".": + if blueprint_name is not None: + endpoint = blueprint_name + endpoint + else: + endpoint = endpoint[1:] + + external = values.pop("_external", False) + + # Otherwise go with the url adapter from the appctx and make + # the URLs external by default. + else: + url_adapter = appctx.url_adapter + + if url_adapter is None: + raise RuntimeError( + "Application was not able to create a URL adapter for request" + " independent URL generation. You might be able to fix this by" + " setting the SERVER_NAME config variable." + ) + + external = values.pop("_external", True) + + anchor = values.pop("_anchor", None) + method = values.pop("_method", None) + scheme = values.pop("_scheme", None) + appctx.app.inject_url_defaults(endpoint, values) + + # This is not the best way to deal with this but currently the + # underlying Werkzeug router does not support overriding the scheme on + # a per build call basis. + old_scheme = None + if scheme is not None: + if not external: + raise ValueError("When specifying _scheme, _external must be True") + old_scheme = url_adapter.url_scheme + url_adapter.url_scheme = scheme + + try: + try: + rv = url_adapter.build( + endpoint, values, method=method, force_external=external + ) + finally: + if old_scheme is not None: + url_adapter.url_scheme = old_scheme + except BuildError as error: + # We need to inject the values again so that the app callback can + # deal with that sort of stuff. + values["_external"] = external + values["_anchor"] = anchor + values["_method"] = method + values["_scheme"] = scheme + return appctx.app.handle_url_build_error(error, endpoint, values) + + if anchor is not None: + rv += "#" + url_quote(anchor) + return rv + + +def get_template_attribute(template_name, attribute): + """Loads a macro (or variable) a template exports. This can be used to + invoke a macro from within Python code. If you for example have a + template named :file:`_cider.html` with the following contents: + + .. sourcecode:: html+jinja + + {% macro hello(name) %}Hello {{ name }}!{% endmacro %} + + You can access this from Python code like this:: + + hello = get_template_attribute('_cider.html', 'hello') + return hello('World') + + .. versionadded:: 0.2 + + :param template_name: the name of the template + :param attribute: the name of the variable of macro to access + """ + return getattr(current_app.jinja_env.get_template(template_name).module, attribute) + + +def flash(message, category="message"): + """Flashes a message to the next request. In order to remove the + flashed message from the session and to display it to the user, + the template has to call :func:`get_flashed_messages`. + + .. versionchanged:: 0.3 + `category` parameter added. + + :param message: the message to be flashed. + :param category: the category for the message. The following values + are recommended: ``'message'`` for any kind of message, + ``'error'`` for errors, ``'info'`` for information + messages and ``'warning'`` for warnings. However any + kind of string can be used as category. + """ + # Original implementation: + # + # session.setdefault('_flashes', []).append((category, message)) + # + # This assumed that changes made to mutable structures in the session are + # always in sync with the session object, which is not true for session + # implementations that use external storage for keeping their keys/values. 
+ flashes = session.get("_flashes", []) + flashes.append((category, message)) + session["_flashes"] = flashes + message_flashed.send( + current_app._get_current_object(), message=message, category=category + ) + + +def get_flashed_messages(with_categories=False, category_filter=()): + """Pulls all flashed messages from the session and returns them. + Further calls in the same request to the function will return + the same messages. By default just the messages are returned, + but when `with_categories` is set to ``True``, the return value will + be a list of tuples in the form ``(category, message)`` instead. + + Filter the flashed messages to one or more categories by providing those + categories in `category_filter`. This allows rendering categories in + separate html blocks. The `with_categories` and `category_filter` + arguments are distinct: + + * `with_categories` controls whether categories are returned with message + text (``True`` gives a tuple, where ``False`` gives just the message text). + * `category_filter` filters the messages down to only those matching the + provided categories. + + See :ref:`message-flashing-pattern` for examples. + + .. versionchanged:: 0.3 + `with_categories` parameter added. + + .. versionchanged:: 0.9 + `category_filter` parameter added. + + :param with_categories: set to ``True`` to also receive categories. + :param category_filter: whitelist of categories to limit return values + """ + flashes = _request_ctx_stack.top.flashes + if flashes is None: + _request_ctx_stack.top.flashes = flashes = ( + session.pop("_flashes") if "_flashes" in session else [] + ) + if category_filter: + flashes = list(filter(lambda f: f[0] in category_filter, flashes)) + if not with_categories: + return [x[1] for x in flashes] + return flashes + + +def send_file( + filename_or_fp, + mimetype=None, + as_attachment=False, + attachment_filename=None, + add_etags=True, + cache_timeout=None, + conditional=False, + last_modified=None, +): + """Sends the contents of a file to the client. This will use the + most efficient method available and configured. By default it will + try to use the WSGI server's file_wrapper support. Alternatively + you can set the application's :attr:`~Flask.use_x_sendfile` attribute + to ``True`` to directly emit an ``X-Sendfile`` header. This however + requires support of the underlying webserver for ``X-Sendfile``. + + By default it will try to guess the mimetype for you, but you can + also explicitly provide one. For extra security you probably want + to send certain files as attachment (HTML for instance). The mimetype + guessing requires a `filename` or an `attachment_filename` to be + provided. + + ETags will also be attached automatically if a `filename` is provided. You + can turn this off by setting `add_etags=False`. + + If `conditional=True` and `filename` is provided, this method will try to + upgrade the response stream to support range requests. This will allow + the request to be answered with partial content response. + + Please never pass filenames to this function from user sources; + you should use :func:`send_from_directory` instead. + + .. versionadded:: 0.2 + + .. versionadded:: 0.5 + The `add_etags`, `cache_timeout` and `conditional` parameters were + added. The default behavior is now to attach etags. + + .. versionchanged:: 0.7 + mimetype guessing and etag support for file objects was + deprecated because it was unreliable. Pass a filename if you are + able to, otherwise attach an etag yourself. 
This functionality + will be removed in Flask 1.0 + + .. versionchanged:: 0.9 + cache_timeout pulls its default from application config, when None. + + .. versionchanged:: 0.12 + The filename is no longer automatically inferred from file objects. If + you want to use automatic mimetype and etag support, pass a filepath via + `filename_or_fp` or `attachment_filename`. + + .. versionchanged:: 0.12 + The `attachment_filename` is preferred over `filename` for MIME-type + detection. + + .. versionchanged:: 1.0 + UTF-8 filenames, as specified in `RFC 2231`_, are supported. + + .. _RFC 2231: https://tools.ietf.org/html/rfc2231#section-4 + + .. versionchanged:: 1.0.3 + Filenames are encoded with ASCII instead of Latin-1 for broader + compatibility with WSGI servers. + + .. versionchanged:: 1.1 + Filename may be a :class:`~os.PathLike` object. + + .. versionadded:: 1.1 + Partial content supports :class:`~io.BytesIO`. + + :param filename_or_fp: the filename of the file to send. + This is relative to the :attr:`~Flask.root_path` + if a relative path is specified. + Alternatively a file object might be provided in + which case ``X-Sendfile`` might not work and fall + back to the traditional method. Make sure that the + file pointer is positioned at the start of data to + send before calling :func:`send_file`. + :param mimetype: the mimetype of the file if provided. If a file path is + given, auto detection happens as fallback, otherwise an + error will be raised. + :param as_attachment: set to ``True`` if you want to send this file with + a ``Content-Disposition: attachment`` header. + :param attachment_filename: the filename for the attachment if it + differs from the file's filename. + :param add_etags: set to ``False`` to disable attaching of etags. + :param conditional: set to ``True`` to enable conditional responses. + + :param cache_timeout: the timeout in seconds for the headers. When ``None`` + (default), this value is set by + :meth:`~Flask.get_send_file_max_age` of + :data:`~flask.current_app`. + :param last_modified: set the ``Last-Modified`` header to this value, + a :class:`~datetime.datetime` or timestamp. + If a file was passed, this overrides its mtime. + """ + mtime = None + fsize = None + + if hasattr(filename_or_fp, "__fspath__"): + filename_or_fp = fspath(filename_or_fp) + + if isinstance(filename_or_fp, string_types): + filename = filename_or_fp + if not os.path.isabs(filename): + filename = os.path.join(current_app.root_path, filename) + file = None + if attachment_filename is None: + attachment_filename = os.path.basename(filename) + else: + file = filename_or_fp + filename = None + + if mimetype is None: + if attachment_filename is not None: + mimetype = ( + mimetypes.guess_type(attachment_filename)[0] + or "application/octet-stream" + ) + + if mimetype is None: + raise ValueError( + "Unable to infer MIME-type because no filename is available. " + "Please set either `attachment_filename`, pass a filepath to " + "`filename_or_fp` or set your own MIME-type via `mimetype`." 
+ ) + + headers = Headers() + if as_attachment: + if attachment_filename is None: + raise TypeError("filename unavailable, required for sending as attachment") + + if not isinstance(attachment_filename, text_type): + attachment_filename = attachment_filename.decode("utf-8") + + try: + attachment_filename = attachment_filename.encode("ascii") + except UnicodeEncodeError: + filenames = { + "filename": unicodedata.normalize("NFKD", attachment_filename).encode( + "ascii", "ignore" + ), + "filename*": "UTF-8''%s" % url_quote(attachment_filename, safe=b""), + } + else: + filenames = {"filename": attachment_filename} + + headers.add("Content-Disposition", "attachment", **filenames) + + if current_app.use_x_sendfile and filename: + if file is not None: + file.close() + headers["X-Sendfile"] = filename + fsize = os.path.getsize(filename) + headers["Content-Length"] = fsize + data = None + else: + if file is None: + file = open(filename, "rb") + mtime = os.path.getmtime(filename) + fsize = os.path.getsize(filename) + headers["Content-Length"] = fsize + elif isinstance(file, io.BytesIO): + try: + fsize = file.getbuffer().nbytes + except AttributeError: + # Python 2 doesn't have getbuffer + fsize = len(file.getvalue()) + headers["Content-Length"] = fsize + data = wrap_file(request.environ, file) + + rv = current_app.response_class( + data, mimetype=mimetype, headers=headers, direct_passthrough=True + ) + + if last_modified is not None: + rv.last_modified = last_modified + elif mtime is not None: + rv.last_modified = mtime + + rv.cache_control.public = True + if cache_timeout is None: + cache_timeout = current_app.get_send_file_max_age(filename) + if cache_timeout is not None: + rv.cache_control.max_age = cache_timeout + rv.expires = int(time() + cache_timeout) + + if add_etags and filename is not None: + from warnings import warn + + try: + rv.set_etag( + "%s-%s-%s" + % ( + os.path.getmtime(filename), + os.path.getsize(filename), + adler32( + filename.encode("utf-8") + if isinstance(filename, text_type) + else filename + ) + & 0xFFFFFFFF, + ) + ) + except OSError: + warn( + "Access %s failed, maybe it does not exist, so ignore etags in " + "headers" % filename, + stacklevel=2, + ) + + if conditional: + try: + rv = rv.make_conditional(request, accept_ranges=True, complete_length=fsize) + except RequestedRangeNotSatisfiable: + if file is not None: + file.close() + raise + # make sure we don't send x-sendfile for servers that + # ignore the 304 status code for x-sendfile. + if rv.status_code == 304: + rv.headers.pop("x-sendfile", None) + return rv + + +def safe_join(directory, *pathnames): + """Safely join `directory` and zero or more untrusted `pathnames` + components. + + Example usage:: + + @app.route('/wiki/') + def wiki_page(filename): + filename = safe_join(app.config['WIKI_FOLDER'], filename) + with open(filename, 'rb') as fd: + content = fd.read() # Read and process the file content... + + :param directory: the trusted base directory. + :param pathnames: the untrusted pathnames relative to that directory. + :raises: :class:`~werkzeug.exceptions.NotFound` if one or more passed + paths fall out of its boundaries. + """ + + parts = [directory] + + for filename in pathnames: + if filename != "": + filename = posixpath.normpath(filename) + + if ( + any(sep in filename for sep in _os_alt_seps) + or os.path.isabs(filename) + or filename == ".." 
+ or filename.startswith("../") + ): + raise NotFound() + + parts.append(filename) + + return posixpath.join(*parts) + + +def send_from_directory(directory, filename, **options): + """Send a file from a given directory with :func:`send_file`. This + is a secure way to quickly expose static files from an upload folder + or something similar. + + Example usage:: + + @app.route('/uploads/') + def download_file(filename): + return send_from_directory(app.config['UPLOAD_FOLDER'], + filename, as_attachment=True) + + .. admonition:: Sending files and Performance + + It is strongly recommended to activate either ``X-Sendfile`` support in + your webserver or (if no authentication happens) to tell the webserver + to serve files for the given path on its own without calling into the + web application for improved performance. + + .. versionadded:: 0.5 + + :param directory: the directory where all the files are stored. + :param filename: the filename relative to that directory to + download. + :param options: optional keyword arguments that are directly + forwarded to :func:`send_file`. + """ + filename = fspath(filename) + directory = fspath(directory) + filename = safe_join(directory, filename) + if not os.path.isabs(filename): + filename = os.path.join(current_app.root_path, filename) + try: + if not os.path.isfile(filename): + raise NotFound() + except (TypeError, ValueError): + raise BadRequest() + options.setdefault("conditional", True) + return send_file(filename, **options) + + +def get_root_path(import_name): + """Returns the path to a package or cwd if that cannot be found. This + returns the path of a package or the folder that contains a module. + + Not to be confused with the package path returned by :func:`find_package`. + """ + # Module already imported and has a file attribute. Use that first. + mod = sys.modules.get(import_name) + if mod is not None and hasattr(mod, "__file__"): + return os.path.dirname(os.path.abspath(mod.__file__)) + + # Next attempt: check the loader. + loader = pkgutil.get_loader(import_name) + + # Loader does not exist or we're referring to an unloaded main module + # or a main module without path (interactive sessions), go with the + # current working directory. + if loader is None or import_name == "__main__": + return os.getcwd() + + # For .egg, zipimporter does not have get_filename until Python 2.7. + # Some other loaders might exhibit the same behavior. + if hasattr(loader, "get_filename"): + filepath = loader.get_filename(import_name) + else: + # Fall back to imports. + __import__(import_name) + mod = sys.modules[import_name] + filepath = getattr(mod, "__file__", None) + + # If we don't have a filepath it might be because we are a + # namespace package. In this case we pick the root path from the + # first module that is contained in our package. + if filepath is None: + raise RuntimeError( + "No root path can be found for the provided " + 'module "%s". This can happen because the ' + "module came from an import hook that does " + "not provide file name information or because " + "it's a namespace package. In this case " + "the root path needs to be explicitly " + "provided." % import_name + ) + + # filepath is import_name.py for a module, or __init__.py for a package. + return os.path.dirname(os.path.abspath(filepath)) + + +def _matching_loader_thinks_module_is_package(loader, mod_name): + """Given the loader that loaded a module and the module this function + attempts to figure out if the given module is actually a package. 
+ """ + # If the loader can tell us if something is a package, we can + # directly ask the loader. + if hasattr(loader, "is_package"): + return loader.is_package(mod_name) + # importlib's namespace loaders do not have this functionality but + # all the modules it loads are packages, so we can take advantage of + # this information. + elif ( + loader.__class__.__module__ == "_frozen_importlib" + and loader.__class__.__name__ == "NamespaceLoader" + ): + return True + # Otherwise we need to fail with an error that explains what went + # wrong. + raise AttributeError( + ( + "%s.is_package() method is missing but is required by Flask of " + "PEP 302 import hooks. If you do not use import hooks and " + "you encounter this error please file a bug against Flask." + ) + % loader.__class__.__name__ + ) + + +def _find_package_path(root_mod_name): + """Find the path where the module's root exists in""" + if sys.version_info >= (3, 4): + import importlib.util + + try: + spec = importlib.util.find_spec(root_mod_name) + if spec is None: + raise ValueError("not found") + # ImportError: the machinery told us it does not exist + # ValueError: + # - the module name was invalid + # - the module name is __main__ + # - *we* raised `ValueError` due to `spec` being `None` + except (ImportError, ValueError): + pass # handled below + else: + # namespace package + if spec.origin in {"namespace", None}: + return os.path.dirname(next(iter(spec.submodule_search_locations))) + # a package (with __init__.py) + elif spec.submodule_search_locations: + return os.path.dirname(os.path.dirname(spec.origin)) + # just a normal module + else: + return os.path.dirname(spec.origin) + + # we were unable to find the `package_path` using PEP 451 loaders + loader = pkgutil.get_loader(root_mod_name) + if loader is None or root_mod_name == "__main__": + # import name is not found, or interactive/main module + return os.getcwd() + else: + # For .egg, zipimporter does not have get_filename until Python 2.7. + if hasattr(loader, "get_filename"): + filename = loader.get_filename(root_mod_name) + elif hasattr(loader, "archive"): + # zipimporter's loader.archive points to the .egg or .zip + # archive filename is dropped in call to dirname below. + filename = loader.archive + else: + # At least one loader is missing both get_filename and archive: + # Google App Engine's HardenedModulesHook + # + # Fall back to imports. + __import__(root_mod_name) + filename = sys.modules[root_mod_name].__file__ + package_path = os.path.abspath(os.path.dirname(filename)) + + # In case the root module is a package we need to chop of the + # rightmost part. This needs to go through a helper function + # because of python 3.3 namespace packages. + if _matching_loader_thinks_module_is_package(loader, root_mod_name): + package_path = os.path.dirname(package_path) + + return package_path + + +def find_package(import_name): + """Finds a package and returns the prefix (or None if the package is + not installed) as well as the folder that contains the package or + module as a tuple. The package path returned is the module that would + have to be added to the pythonpath in order to make it possible to + import the module. The prefix is the path below which a UNIX like + folder structure exists (lib, share etc.). 
+ """ + root_mod_name, _, _ = import_name.partition(".") + package_path = _find_package_path(root_mod_name) + site_parent, site_folder = os.path.split(package_path) + py_prefix = os.path.abspath(sys.prefix) + if package_path.startswith(py_prefix): + return py_prefix, package_path + elif site_folder.lower() == "site-packages": + parent, folder = os.path.split(site_parent) + # Windows like installations + if folder.lower() == "lib": + base_dir = parent + # UNIX like installations + elif os.path.basename(parent).lower() == "lib": + base_dir = os.path.dirname(parent) + else: + base_dir = site_parent + return base_dir, package_path + return None, package_path + + +class locked_cached_property(object): + """A decorator that converts a function into a lazy property. The + function wrapped is called the first time to retrieve the result + and then that calculated result is used the next time you access + the value. Works like the one in Werkzeug but has a lock for + thread safety. + """ + + def __init__(self, func, name=None, doc=None): + self.__name__ = name or func.__name__ + self.__module__ = func.__module__ + self.__doc__ = doc or func.__doc__ + self.func = func + self.lock = RLock() + + def __get__(self, obj, type=None): + if obj is None: + return self + with self.lock: + value = obj.__dict__.get(self.__name__, _missing) + if value is _missing: + value = self.func(obj) + obj.__dict__[self.__name__] = value + return value + + +class _PackageBoundObject(object): + #: The name of the package or module that this app belongs to. Do not + #: change this once it is set by the constructor. + import_name = None + + #: Location of the template files to be added to the template lookup. + #: ``None`` if templates should not be added. + template_folder = None + + #: Absolute path to the package on the filesystem. Used to look up + #: resources contained in the package. + root_path = None + + def __init__(self, import_name, template_folder=None, root_path=None): + self.import_name = import_name + self.template_folder = template_folder + + if root_path is None: + root_path = get_root_path(self.import_name) + + self.root_path = root_path + self._static_folder = None + self._static_url_path = None + + # circular import + from .cli import AppGroup + + #: The Click command group for registration of CLI commands + #: on the application and associated blueprints. These commands + #: are accessible via the :command:`flask` command once the + #: application has been discovered and blueprints registered. + self.cli = AppGroup() + + @property + def static_folder(self): + """The absolute path to the configured static folder.""" + if self._static_folder is not None: + return os.path.join(self.root_path, self._static_folder) + + @static_folder.setter + def static_folder(self, value): + self._static_folder = value + + @property + def static_url_path(self): + """The URL prefix that the static route will be accessible from. + + If it was not configured during init, it is derived from + :attr:`static_folder`. + """ + if self._static_url_path is not None: + return self._static_url_path + + if self.static_folder is not None: + basename = os.path.basename(self.static_folder) + return ("/" + basename).rstrip("/") + + @static_url_path.setter + def static_url_path(self, value): + if value is not None: + value = value.rstrip("/") + + self._static_url_path = value + + @property + def has_static_folder(self): + """This is ``True`` if the package bound object's container has a + folder for static files. + + .. 
versionadded:: 0.5 + """ + return self.static_folder is not None + + @locked_cached_property + def jinja_loader(self): + """The Jinja loader for this package bound object. + + .. versionadded:: 0.5 + """ + if self.template_folder is not None: + return FileSystemLoader(os.path.join(self.root_path, self.template_folder)) + + def get_send_file_max_age(self, filename): + """Provides default cache_timeout for the :func:`send_file` functions. + + By default, this function returns ``SEND_FILE_MAX_AGE_DEFAULT`` from + the configuration of :data:`~flask.current_app`. + + Static file functions such as :func:`send_from_directory` use this + function, and :func:`send_file` calls this function on + :data:`~flask.current_app` when the given cache_timeout is ``None``. If a + cache_timeout is given in :func:`send_file`, that timeout is used; + otherwise, this method is called. + + This allows subclasses to change the behavior when sending files based + on the filename. For example, to set the cache timeout for .js files + to 60 seconds:: + + class MyFlask(flask.Flask): + def get_send_file_max_age(self, name): + if name.lower().endswith('.js'): + return 60 + return flask.Flask.get_send_file_max_age(self, name) + + .. versionadded:: 0.9 + """ + return total_seconds(current_app.send_file_max_age_default) + + def send_static_file(self, filename): + """Function used internally to send static files from the static + folder to the browser. + + .. versionadded:: 0.5 + """ + if not self.has_static_folder: + raise RuntimeError("No static folder for this object") + # Ensure get_send_file_max_age is called in all cases. + # Here, we ensure get_send_file_max_age is called for Blueprints. + cache_timeout = self.get_send_file_max_age(filename) + return send_from_directory( + self.static_folder, filename, cache_timeout=cache_timeout + ) + + def open_resource(self, resource, mode="rb"): + """Opens a resource from the application's resource folder. To see + how this works, consider the following folder structure:: + + /myapplication.py + /schema.sql + /static + /style.css + /templates + /layout.html + /index.html + + If you want to open the :file:`schema.sql` file you would do the + following:: + + with app.open_resource('schema.sql') as f: + contents = f.read() + do_something_with(contents) + + :param resource: the name of the resource. To access resources within + subfolders use forward slashes as separator. + :param mode: Open file in this mode. Only reading is supported, + valid values are "r" (or "rt") and "rb". + """ + if mode not in {"r", "rt", "rb"}: + raise ValueError("Resources can only be opened for reading") + + return open(os.path.join(self.root_path, resource), mode) + + +def total_seconds(td): + """Returns the total seconds from a timedelta object. + + :param timedelta td: the timedelta to be converted in seconds + + :returns: number of seconds + :rtype: int + """ + return td.days * 60 * 60 * 24 + td.seconds + + +def is_ip(value): + """Determine if the given string is an IP address. + + Python 2 on Windows doesn't provide ``inet_pton``, so this only + checks IPv4 addresses in that environment. 
+ + :param value: value to check + :type value: str + + :return: True if string is an IP address + :rtype: bool + """ + if PY2 and os.name == "nt": + try: + socket.inet_aton(value) + return True + except socket.error: + return False + + for family in (socket.AF_INET, socket.AF_INET6): + try: + socket.inet_pton(family, value) + except socket.error: + pass + else: + return True + + return False diff --git a/py3/lib/python3.6/site-packages/flask/json/__init__.py b/py3/lib/python3.6/site-packages/flask/json/__init__.py new file mode 100644 index 0000000..a141068 --- /dev/null +++ b/py3/lib/python3.6/site-packages/flask/json/__init__.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +""" +flask.json +~~~~~~~~~~ + +:copyright: 2010 Pallets +:license: BSD-3-Clause +""" +import codecs +import io +import uuid +from datetime import date +from datetime import datetime + +from itsdangerous import json as _json +from jinja2 import Markup +from werkzeug.http import http_date + +from .._compat import PY2 +from .._compat import text_type +from ..globals import current_app +from ..globals import request + +try: + import dataclasses +except ImportError: + dataclasses = None + +# Figure out if simplejson escapes slashes. This behavior was changed +# from one version to another without reason. +_slash_escape = "\\/" not in _json.dumps("/") + + +__all__ = [ + "dump", + "dumps", + "load", + "loads", + "htmlsafe_dump", + "htmlsafe_dumps", + "JSONDecoder", + "JSONEncoder", + "jsonify", +] + + +def _wrap_reader_for_text(fp, encoding): + if isinstance(fp.read(0), bytes): + fp = io.TextIOWrapper(io.BufferedReader(fp), encoding) + return fp + + +def _wrap_writer_for_text(fp, encoding): + try: + fp.write("") + except TypeError: + fp = io.TextIOWrapper(fp, encoding) + return fp + + +class JSONEncoder(_json.JSONEncoder): + """The default Flask JSON encoder. This one extends the default + encoder by also supporting ``datetime``, ``UUID``, ``dataclasses``, + and ``Markup`` objects. + + ``datetime`` objects are serialized as RFC 822 datetime strings. + This is the same as the HTTP date format. + + In order to support more data types, override the :meth:`default` + method. + """ + + def default(self, o): + """Implement this method in a subclass such that it returns a + serializable object for ``o``, or calls the base implementation (to + raise a :exc:`TypeError`). + + For example, to support arbitrary iterators, you could implement + default like this:: + + def default(self, o): + try: + iterable = iter(o) + except TypeError: + pass + else: + return list(iterable) + return JSONEncoder.default(self, o) + """ + if isinstance(o, datetime): + return http_date(o.utctimetuple()) + if isinstance(o, date): + return http_date(o.timetuple()) + if isinstance(o, uuid.UUID): + return str(o) + if dataclasses and dataclasses.is_dataclass(o): + return dataclasses.asdict(o) + if hasattr(o, "__html__"): + return text_type(o.__html__()) + return _json.JSONEncoder.default(self, o) + + +class JSONDecoder(_json.JSONDecoder): + """The default JSON decoder. This one does not change the behavior from + the default simplejson decoder. Consult the :mod:`json` documentation + for more information. This decoder is not only used for the load + functions of this module but also :attr:`~flask.Request`. 
+ """ + + +def _dump_arg_defaults(kwargs, app=None): + """Inject default arguments for dump functions.""" + if app is None: + app = current_app + + if app: + bp = app.blueprints.get(request.blueprint) if request else None + kwargs.setdefault( + "cls", bp.json_encoder if bp and bp.json_encoder else app.json_encoder + ) + + if not app.config["JSON_AS_ASCII"]: + kwargs.setdefault("ensure_ascii", False) + + kwargs.setdefault("sort_keys", app.config["JSON_SORT_KEYS"]) + else: + kwargs.setdefault("sort_keys", True) + kwargs.setdefault("cls", JSONEncoder) + + +def _load_arg_defaults(kwargs, app=None): + """Inject default arguments for load functions.""" + if app is None: + app = current_app + + if app: + bp = app.blueprints.get(request.blueprint) if request else None + kwargs.setdefault( + "cls", bp.json_decoder if bp and bp.json_decoder else app.json_decoder + ) + else: + kwargs.setdefault("cls", JSONDecoder) + + +def detect_encoding(data): + """Detect which UTF codec was used to encode the given bytes. + + The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is + accepted. Older documents allowed 8, 16, or 32. 16 and 32 can be big + or little endian. Some editors or libraries may prepend a BOM. + + :param data: Bytes in unknown UTF encoding. + :return: UTF encoding name + """ + head = data[:4] + + if head[:3] == codecs.BOM_UTF8: + return "utf-8-sig" + + if b"\x00" not in head: + return "utf-8" + + if head in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE): + return "utf-32" + + if head[:2] in (codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE): + return "utf-16" + + if len(head) == 4: + if head[:3] == b"\x00\x00\x00": + return "utf-32-be" + + if head[::2] == b"\x00\x00": + return "utf-16-be" + + if head[1:] == b"\x00\x00\x00": + return "utf-32-le" + + if head[1::2] == b"\x00\x00": + return "utf-16-le" + + if len(head) == 2: + return "utf-16-be" if head.startswith(b"\x00") else "utf-16-le" + + return "utf-8" + + +def dumps(obj, app=None, **kwargs): + """Serialize ``obj`` to a JSON-formatted string. If there is an + app context pushed, use the current app's configured encoder + (:attr:`~flask.Flask.json_encoder`), or fall back to the default + :class:`JSONEncoder`. + + Takes the same arguments as the built-in :func:`json.dumps`, and + does some extra configuration based on the application. If the + simplejson package is installed, it is preferred. + + :param obj: Object to serialize to JSON. + :param app: App instance to use to configure the JSON encoder. + Uses ``current_app`` if not given, and falls back to the default + encoder when not in an app context. + :param kwargs: Extra arguments passed to :func:`json.dumps`. + + .. versionchanged:: 1.0.3 + + ``app`` can be passed directly, rather than requiring an app + context for configuration. + """ + _dump_arg_defaults(kwargs, app=app) + encoding = kwargs.pop("encoding", None) + rv = _json.dumps(obj, **kwargs) + if encoding is not None and isinstance(rv, text_type): + rv = rv.encode(encoding) + return rv + + +def dump(obj, fp, app=None, **kwargs): + """Like :func:`dumps` but writes into a file object.""" + _dump_arg_defaults(kwargs, app=app) + encoding = kwargs.pop("encoding", None) + if encoding is not None: + fp = _wrap_writer_for_text(fp, encoding) + _json.dump(obj, fp, **kwargs) + + +def loads(s, app=None, **kwargs): + """Deserialize an object from a JSON-formatted string ``s``. If + there is an app context pushed, use the current app's configured + decoder (:attr:`~flask.Flask.json_decoder`), or fall back to the + default :class:`JSONDecoder`. 
+ + Takes the same arguments as the built-in :func:`json.loads`, and + does some extra configuration based on the application. If the + simplejson package is installed, it is preferred. + + :param s: JSON string to deserialize. + :param app: App instance to use to configure the JSON decoder. + Uses ``current_app`` if not given, and falls back to the default + encoder when not in an app context. + :param kwargs: Extra arguments passed to :func:`json.dumps`. + + .. versionchanged:: 1.0.3 + + ``app`` can be passed directly, rather than requiring an app + context for configuration. + """ + _load_arg_defaults(kwargs, app=app) + if isinstance(s, bytes): + encoding = kwargs.pop("encoding", None) + if encoding is None: + encoding = detect_encoding(s) + s = s.decode(encoding) + return _json.loads(s, **kwargs) + + +def load(fp, app=None, **kwargs): + """Like :func:`loads` but reads from a file object.""" + _load_arg_defaults(kwargs, app=app) + if not PY2: + fp = _wrap_reader_for_text(fp, kwargs.pop("encoding", None) or "utf-8") + return _json.load(fp, **kwargs) + + +def htmlsafe_dumps(obj, **kwargs): + """Works exactly like :func:`dumps` but is safe for use in `` +
+''' + +__all__ = ["RecaptchaWidget"] + + +class RecaptchaWidget(object): + + def recaptcha_html(self, public_key): + html = current_app.config.get('RECAPTCHA_HTML') + if html: + return Markup(html) + params = current_app.config.get('RECAPTCHA_PARAMETERS') + script = RECAPTCHA_SCRIPT + if params: + script += u'?' + url_encode(params) + + attrs = current_app.config.get('RECAPTCHA_DATA_ATTRS', {}) + attrs['sitekey'] = public_key + snippet = u' '.join([u'data-%s="%s"' % (k, attrs[k]) for k in attrs]) + return Markup(RECAPTCHA_TEMPLATE % (script, snippet)) + + def __call__(self, field, error=None, **kwargs): + """Returns the recaptcha input HTML.""" + + try: + public_key = current_app.config['RECAPTCHA_PUBLIC_KEY'] + except KeyError: + raise RuntimeError("RECAPTCHA_PUBLIC_KEY config not set") + + return self.recaptcha_html(public_key) diff --git a/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/LICENSE.txt b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..65865a9 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/LICENSE.txt @@ -0,0 +1,23 @@ +2009-2018 (c) Benoît Chesneau +2009-2015 (c) Paul J. Davis + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
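The RecaptchaWidget vendored above builds its markup entirely from application config: RECAPTCHA_PUBLIC_KEY is mandatory (its absence makes __call__ raise a RuntimeError), while RECAPTCHA_HTML, RECAPTCHA_PARAMETERS and RECAPTCHA_DATA_ATTRS are optional overrides. A minimal sketch of exercising that widget follows; it is illustrative only and not part of this diff, and the import path and concrete config values are assumptions.

# Minimal sketch (illustrative only, not part of the vendored sources).
# The flask_wtf.recaptcha.widgets path and all config values below are assumptions.
from flask import Flask
from flask_wtf.recaptcha.widgets import RecaptchaWidget

app = Flask(__name__)
app.config["RECAPTCHA_PUBLIC_KEY"] = "your-site-key"   # required; otherwise __call__ raises RuntimeError
app.config["RECAPTCHA_PARAMETERS"] = {"hl": "en"}      # appended to the script URL as a query string
app.config["RECAPTCHA_DATA_ATTRS"] = {"theme": "dark"} # rendered as data-* attributes alongside data-sitekey

with app.app_context():                                # the widget reads config via current_app
    html = RecaptchaWidget()(field=None)               # `field` is accepted but unused by recaptcha_html
    print(html)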
diff --git a/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/METADATA b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/METADATA new file mode 100644 index 0000000..148e500 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/METADATA @@ -0,0 +1,111 @@ +Metadata-Version: 2.1 +Name: gunicorn +Version: 19.9.0 +Summary: WSGI HTTP Server for UNIX +Home-page: http://gunicorn.org +Author: Benoit Chesneau +Author-email: benoitc@e-engura.com +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Other Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Internet +Classifier: Topic :: Utilities +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Requires-Python: >=2.6, !=3.0.*, !=3.1.* +Provides-Extra: tornado +Provides-Extra: gthread +Provides-Extra: eventlet +Provides-Extra: gevent +Provides-Extra: eventlet +Requires-Dist: eventlet (>=0.9.7); extra == 'eventlet' +Provides-Extra: gevent +Requires-Dist: gevent (>=0.13); extra == 'gevent' +Provides-Extra: gthread +Provides-Extra: tornado +Requires-Dist: tornado (>=0.2); extra == 'tornado' + +Gunicorn +-------- + +.. image:: https://img.shields.io/pypi/v/gunicorn.svg?style=flat + :alt: PyPI version + :target: https://pypi.python.org/pypi/gunicorn + +.. image:: https://img.shields.io/pypi/pyversions/gunicorn.svg + :alt: Supported Python versions + :target: https://pypi.python.org/pypi/gunicorn + +.. image:: https://travis-ci.org/benoitc/gunicorn.svg?branch=master + :alt: Build Status + :target: https://travis-ci.org/benoitc/gunicorn + +Gunicorn 'Green Unicorn' is a Python WSGI HTTP Server for UNIX. It's a pre-fork +worker model ported from Ruby's Unicorn_ project. The Gunicorn server is broadly +compatible with various web frameworks, simply implemented, light on server +resource usage, and fairly speedy. + +Feel free to join us in `#gunicorn`_ on Freenode_. + +Documentation +------------- + +The documentation is hosted at http://docs.gunicorn.org. + +Installation +------------ + +Gunicorn requires **Python 2.x >= 2.6** or **Python 3.x >= 3.2**. + +Install from PyPI:: + + $ pip install gunicorn + + +Usage +----- + +Basic usage:: + + $ gunicorn [OPTIONS] APP_MODULE + +Where ``APP_MODULE`` is of the pattern ``$(MODULE_NAME):$(VARIABLE_NAME)``. The +module name can be a full dotted path. The variable name refers to a WSGI +callable that should be found in the specified module. + +Example with test app:: + + $ cd examples + $ gunicorn --workers=2 test:app + + +License +------- + +Gunicorn is released under the MIT License. See the LICENSE_ file for more +details. + +.. 
_Unicorn: https://bogomips.org/unicorn/ +.. _`#gunicorn`: https://webchat.freenode.net/?channels=gunicorn +.. _Freenode: https://freenode.net/ +.. _LICENSE: https://github.com/benoitc/gunicorn/blob/master/LICENSE + + diff --git a/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/RECORD b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/RECORD new file mode 100644 index 0000000..d2b7d16 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/RECORD @@ -0,0 +1,89 @@ +gunicorn/__init__.py,sha256=kRm2HQJytwQi-xmyUfM0cOMyls23DfFDgGFbGI2Gj68,255 +gunicorn/_compat.py,sha256=5cXb6vMfVzInDq-AHNyZfsK-UG5NetDn62nPfqylHSU,9355 +gunicorn/arbiter.py,sha256=AbJNSFnTmx9Qd-vZAqEH3y5fz8ydPmyli_BERNIwdyE,20158 +gunicorn/argparse_compat.py,sha256=gsHDGwo4BSJWHdiaEXy0Emr96NKC0LDYmK5nB7PE8Qc,87791 +gunicorn/config.py,sha256=wYeAJFMweU3FXNF4BdfgZzPC94vUXUnuYgI6lNk-5_U,53420 +gunicorn/debug.py,sha256=UUw-eteLEm_OQ98D6K3XtDjx4Dya2H35zdiu8z7F7uc,2289 +gunicorn/errors.py,sha256=JlDBjag90gMiRwLHG3xzEJzDOntSl1iM32R277-U6j0,919 +gunicorn/glogging.py,sha256=bvnX-sky6HgqJor2JZ9VKZZzT4uh_yOgknkYegB7D7Y,15581 +gunicorn/pidfile.py,sha256=_69tsfF1aHklrMrJe2sHERovMduRByVTv99my7yQ874,2357 +gunicorn/reloader.py,sha256=CPNfYAAvJHazX3NAM7qysSRt0fpiHBGPqBlB0tYKhxs,3839 +gunicorn/selectors.py,sha256=14_UESrpE3AQKXWKeeAUG9vBTzJ0yTYDGtEo6xOtlDY,18997 +gunicorn/six.py,sha256=6N-6RCENPfBtMpN5UmgDfDKmJebbbuPu_Dk3Zf8ngww,27344 +gunicorn/sock.py,sha256=gX2FsdsOGMCtSHbDXn7lsiYYYRc3roQklIJLip1oZQo,6019 +gunicorn/systemd.py,sha256=ffhv17cdv-hDeFAJi1eAVtJskkVciV6cQU75Q2oplqg,1362 +gunicorn/util.py,sha256=Ns_a8Pf7MkaEi0KbV3GsP9aVQ2a_S45EjSE6Iyg2tYU,16229 +gunicorn/app/__init__.py,sha256=GuqstqdkizeV4HRbd8aGMBn0Q8IDOyRU1wMMNqNe5GY,127 +gunicorn/app/base.py,sha256=LKxyziLMPNlK3qm6dPMieELBqfLfmwBFnn9SB-KBogE,6652 +gunicorn/app/pasterapp.py,sha256=AGzZnUpcpw8O8KrizxTgdJBZ4lQdrHgsV0gdx7FVTs8,6046 +gunicorn/app/wsgiapp.py,sha256=ny71qjegQHl_bGMjNfq_aemPrmGEpH2bMRIdph6bj4Q,1870 +gunicorn/http/__init__.py,sha256=b4TF3x5F0VYOPTOeNYwRGR1EYHBaPMhZRMoNeuD5-n0,277 +gunicorn/http/_sendfile.py,sha256=Eqd-s3HlvLuyfGjqaH_Jk72cAtEV8hQv5tb1M1AqcBU,2217 +gunicorn/http/body.py,sha256=MmlZpj_6oRPj3oPVSMQZr0X3KH6ikntxDnVcLgfekZs,7345 +gunicorn/http/errors.py,sha256=sNjF2lm4m2qyZ9l95_U33FRxPXpxXzjnZyYqWS-hxd4,2850 +gunicorn/http/message.py,sha256=G5po0upwbrTyIggb_IEAItIjSi_aDoWYLPQ62o8pOI4,12257 +gunicorn/http/parser.py,sha256=IRMvp0veP4wL8Z4vgNV72CPydCNPdNNIy9u-DlDvvSo,1294 +gunicorn/http/unreader.py,sha256=s4kDW5euiJPsDuHzCqFXUtHCApqIxpShb9dtAyjJw9Y,2019 +gunicorn/http/wsgi.py,sha256=SETzcFoLggub2aMuGduTVELBwJGg9YvvDbkiFbugkwU,12856 +gunicorn/instrument/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +gunicorn/instrument/statsd.py,sha256=5xueDuTZMFtmS8ayGT4sU_OyB9qkEv4Agk-eJwAmhJM,4434 +gunicorn/workers/__init__.py,sha256=arPaAM8HxcK39L2dmDzmMhpK9bsyLOJymuCcBz_qqw0,774 +gunicorn/workers/_gaiohttp.py,sha256=llho90CjwpeAB9ehrYeGmD9VZZAPdcNpVwnrBA3GEZA,5079 +gunicorn/workers/base.py,sha256=nzo4KfCQkO3Y2HKuKVk-xInZUiYay_A5B9e_9NVXU28,9121 +gunicorn/workers/base_async.py,sha256=54VkS3S_wrFD7v3jInhFfkeBhaPnV5UN-cu-i5MoXkc,5575 +gunicorn/workers/gaiohttp.py,sha256=3rhXky6APkhI0D9nwXlogLo_Jd9v98CiEuCy9inzCU4,823 +gunicorn/workers/geventlet.py,sha256=mE-Zw3zh8lOZVaprXcfaoBMmwKeDj6sZzdjmgIsvHXw,4258 +gunicorn/workers/ggevent.py,sha256=OV5KCJ3qlJP5E46sjyWQKGbQ5xGR2SOrZlEtLhIB89s,7412 +gunicorn/workers/gthread.py,sha256=HIoWuylHZfH1wlSh4eZ8wxo1kQ5abvdUaFfKfIsgQvI,12009 
+gunicorn/workers/gtornado.py,sha256=LtBWnEX7MNpeGX-YmlBoV1_OOhjkdytFmt1pzOlRPZk,5044 +gunicorn/workers/sync.py,sha256=_vd1JATNLG4MgJppNJG5KWBIzLGYqRzhEAQVz9H11LI,7153 +gunicorn/workers/workertmp.py,sha256=6QINPBrriLvezgkC_hclOOeXLi_owMt_SOA5KPEIN-A,1459 +gunicorn-19.9.0.dist-info/LICENSE.txt,sha256=eJ_hG5Lhyr-890S1_MOSyb1cZ5hgOk6J-SW2M3mE0d8,1136 +gunicorn-19.9.0.dist-info/METADATA,sha256=SBjzTcJcbKUR9ev_rvypyWJYU0qgHvm8KzgfG6FtniE,3388 +gunicorn-19.9.0.dist-info/RECORD,, +gunicorn-19.9.0.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 +gunicorn-19.9.0.dist-info/entry_points.txt,sha256=XeFINKRdSUKwJwaVSolO24PuV_YeO71IMF-rOra5JO8,184 +gunicorn-19.9.0.dist-info/top_level.txt,sha256=cdMaa2yhxb8do-WioY9qRHUCfwf55YztjwQCncaInoE,9 +../../../bin/gunicorn,sha256=b9Awha9n-9y1TmP-jMdwOQ0PdQlCikfEZGR1fx_Y9F8,265 +../../../bin/gunicorn_paster,sha256=H4j9zJJFc_eH6MvKMW9YURMd8d1jtuNGhqLXmCMbCgc,267 +gunicorn-19.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +gunicorn/workers/__pycache__/gtornado.cpython-36.pyc,, +gunicorn/workers/__pycache__/sync.cpython-36.pyc,, +gunicorn/workers/__pycache__/ggevent.cpython-36.pyc,, +gunicorn/workers/__pycache__/gthread.cpython-36.pyc,, +gunicorn/workers/__pycache__/gaiohttp.cpython-36.pyc,, +gunicorn/workers/__pycache__/base.cpython-36.pyc,, +gunicorn/workers/__pycache__/_gaiohttp.cpython-36.pyc,, +gunicorn/workers/__pycache__/base_async.cpython-36.pyc,, +gunicorn/workers/__pycache__/workertmp.cpython-36.pyc,, +gunicorn/workers/__pycache__/__init__.cpython-36.pyc,, +gunicorn/workers/__pycache__/geventlet.cpython-36.pyc,, +gunicorn/http/__pycache__/errors.cpython-36.pyc,, +gunicorn/http/__pycache__/parser.cpython-36.pyc,, +gunicorn/http/__pycache__/body.cpython-36.pyc,, +gunicorn/http/__pycache__/message.cpython-36.pyc,, +gunicorn/http/__pycache__/wsgi.cpython-36.pyc,, +gunicorn/http/__pycache__/__init__.cpython-36.pyc,, +gunicorn/http/__pycache__/unreader.cpython-36.pyc,, +gunicorn/http/__pycache__/_sendfile.cpython-36.pyc,, +gunicorn/__pycache__/selectors.cpython-36.pyc,, +gunicorn/__pycache__/arbiter.cpython-36.pyc,, +gunicorn/__pycache__/_compat.cpython-36.pyc,, +gunicorn/__pycache__/glogging.cpython-36.pyc,, +gunicorn/__pycache__/systemd.cpython-36.pyc,, +gunicorn/__pycache__/config.cpython-36.pyc,, +gunicorn/__pycache__/sock.cpython-36.pyc,, +gunicorn/__pycache__/util.cpython-36.pyc,, +gunicorn/__pycache__/six.cpython-36.pyc,, +gunicorn/__pycache__/errors.cpython-36.pyc,, +gunicorn/__pycache__/argparse_compat.cpython-36.pyc,, +gunicorn/__pycache__/reloader.cpython-36.pyc,, +gunicorn/__pycache__/__init__.cpython-36.pyc,, +gunicorn/__pycache__/pidfile.cpython-36.pyc,, +gunicorn/__pycache__/debug.cpython-36.pyc,, +gunicorn/instrument/__pycache__/statsd.cpython-36.pyc,, +gunicorn/instrument/__pycache__/__init__.cpython-36.pyc,, +gunicorn/app/__pycache__/wsgiapp.cpython-36.pyc,, +gunicorn/app/__pycache__/base.cpython-36.pyc,, +gunicorn/app/__pycache__/pasterapp.cpython-36.pyc,, +gunicorn/app/__pycache__/__init__.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/WHEEL b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/WHEEL new file mode 100644 index 0000000..1316c41 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/entry_points.txt 
b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/entry_points.txt new file mode 100644 index 0000000..d5b5aa1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/entry_points.txt @@ -0,0 +1,8 @@ + + [console_scripts] + gunicorn=gunicorn.app.wsgiapp:run + gunicorn_paster=gunicorn.app.pasterapp:run + + [paste.server_runner] + main=gunicorn.app.pasterapp:paste_server + \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/top_level.txt new file mode 100644 index 0000000..8f22dcc --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn-19.9.0.dist-info/top_level.txt @@ -0,0 +1 @@ +gunicorn diff --git a/py3/lib/python3.6/site-packages/gunicorn/__init__.py b/py3/lib/python3.6/site-packages/gunicorn/__init__.py new file mode 100644 index 0000000..7820479 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/__init__.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +version_info = (19, 9, 0) +__version__ = ".".join([str(v) for v in version_info]) +SERVER_SOFTWARE = "gunicorn/%s" % __version__ diff --git a/py3/lib/python3.6/site-packages/gunicorn/_compat.py b/py3/lib/python3.6/site-packages/gunicorn/_compat.py new file mode 100644 index 0000000..39dbfdf --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/_compat.py @@ -0,0 +1,298 @@ +import sys + +from gunicorn import six + +PY26 = (sys.version_info[:2] == (2, 6)) +PY33 = (sys.version_info >= (3, 3)) + + +def _check_if_pyc(fname): + """Return True if the extension is .pyc, False if .py + and None if otherwise""" + from imp import find_module + from os.path import realpath, dirname, basename, splitext + + # Normalize the file-path for the find_module() + filepath = realpath(fname) + dirpath = dirname(filepath) + module_name = splitext(basename(filepath))[0] + + # Validate and fetch + try: + fileobj, fullpath, (_, _, pytype) = find_module(module_name, [dirpath]) + except ImportError: + raise IOError("Cannot find config file. " + "Path maybe incorrect! : {0}".format(filepath)) + return pytype, fileobj, fullpath + + +def _get_codeobj(pyfile): + """ Returns the code object, given a python file """ + from imp import PY_COMPILED, PY_SOURCE + + result, fileobj, fullpath = _check_if_pyc(pyfile) + + # WARNING: + # fp.read() can blowup if the module is extremely large file. + # Lookout for overflow errors. + try: + data = fileobj.read() + finally: + fileobj.close() + + # This is a .pyc file. Treat accordingly. + if result is PY_COMPILED: + # .pyc format is as follows: + # 0 - 4 bytes: Magic number, which changes with each create of .pyc file. + # First 2 bytes change with each marshal of .pyc file. Last 2 bytes is "\r\n". + # 4 - 8 bytes: Datetime value, when the .py was last changed. + # 8 - EOF: Marshalled code object data. + # So to get code object, just read the 8th byte onwards till EOF, and + # UN-marshal it. + import marshal + code_obj = marshal.loads(data[8:]) + + elif result is PY_SOURCE: + # This is a .py file. 
+ code_obj = compile(data, fullpath, 'exec') + + else: + # Unsupported extension + raise Exception("Input file is unknown format: {0}".format(fullpath)) + + # Return code object + return code_obj + +if six.PY3: + def execfile_(fname, *args): + if fname.endswith(".pyc"): + code = _get_codeobj(fname) + else: + code = compile(open(fname, 'rb').read(), fname, 'exec') + return six.exec_(code, *args) + + def bytes_to_str(b): + if isinstance(b, six.text_type): + return b + return str(b, 'latin1') + + import urllib.parse + + def unquote_to_wsgi_str(string): + return _unquote_to_bytes(string).decode('latin-1') + + _unquote_to_bytes = urllib.parse.unquote_to_bytes + +else: + def execfile_(fname, *args): + """ Overriding PY2 execfile() implementation to support .pyc files """ + if fname.endswith(".pyc"): + return six.exec_(_get_codeobj(fname), *args) + return execfile(fname, *args) + + def bytes_to_str(s): + if isinstance(s, unicode): + return s.encode('utf-8') + return s + + import urllib + unquote_to_wsgi_str = urllib.unquote + + +# The following code adapted from trollius.py33_exceptions +def _wrap_error(exc, mapping, key): + if key not in mapping: + return + new_err_cls = mapping[key] + new_err = new_err_cls(*exc.args) + + # raise a new exception with the original traceback + six.reraise(new_err_cls, new_err, + exc.__traceback__ if hasattr(exc, '__traceback__') else sys.exc_info()[2]) + +if PY33: + import builtins + + BlockingIOError = builtins.BlockingIOError + BrokenPipeError = builtins.BrokenPipeError + ChildProcessError = builtins.ChildProcessError + ConnectionRefusedError = builtins.ConnectionRefusedError + ConnectionResetError = builtins.ConnectionResetError + InterruptedError = builtins.InterruptedError + ConnectionAbortedError = builtins.ConnectionAbortedError + PermissionError = builtins.PermissionError + FileNotFoundError = builtins.FileNotFoundError + ProcessLookupError = builtins.ProcessLookupError + + def wrap_error(func, *args, **kw): + return func(*args, **kw) +else: + import errno + import select + import socket + + class BlockingIOError(OSError): + pass + + class BrokenPipeError(OSError): + pass + + class ChildProcessError(OSError): + pass + + class ConnectionRefusedError(OSError): + pass + + class InterruptedError(OSError): + pass + + class ConnectionResetError(OSError): + pass + + class ConnectionAbortedError(OSError): + pass + + class PermissionError(OSError): + pass + + class FileNotFoundError(OSError): + pass + + class ProcessLookupError(OSError): + pass + + _MAP_ERRNO = { + errno.EACCES: PermissionError, + errno.EAGAIN: BlockingIOError, + errno.EALREADY: BlockingIOError, + errno.ECHILD: ChildProcessError, + errno.ECONNABORTED: ConnectionAbortedError, + errno.ECONNREFUSED: ConnectionRefusedError, + errno.ECONNRESET: ConnectionResetError, + errno.EINPROGRESS: BlockingIOError, + errno.EINTR: InterruptedError, + errno.ENOENT: FileNotFoundError, + errno.EPERM: PermissionError, + errno.EPIPE: BrokenPipeError, + errno.ESHUTDOWN: BrokenPipeError, + errno.EWOULDBLOCK: BlockingIOError, + errno.ESRCH: ProcessLookupError, + } + + def wrap_error(func, *args, **kw): + """ + Wrap socket.error, IOError, OSError, select.error to raise new specialized + exceptions of Python 3.3 like InterruptedError (PEP 3151). + """ + try: + return func(*args, **kw) + except (socket.error, IOError, OSError) as exc: + if hasattr(exc, 'winerror'): + _wrap_error(exc, _MAP_ERRNO, exc.winerror) + # _MAP_ERRNO does not contain all Windows errors. 
+ # For some errors like "file not found", exc.errno should + # be used (ex: ENOENT). + _wrap_error(exc, _MAP_ERRNO, exc.errno) + raise + except select.error as exc: + if exc.args: + _wrap_error(exc, _MAP_ERRNO, exc.args[0]) + raise + +if PY26: + from urlparse import ( + _parse_cache, MAX_CACHE_SIZE, clear_cache, _splitnetloc, SplitResult, + scheme_chars, + ) + + def urlsplit(url, scheme='', allow_fragments=True): + """Parse a URL into 5 components: + :///?# + Return a 5-tuple: (scheme, netloc, path, query, fragment). + Note that we don't break the components up in smaller bits + (e.g. netloc is a single string) and we don't expand % escapes.""" + allow_fragments = bool(allow_fragments) + key = url, scheme, allow_fragments, type(url), type(scheme) + cached = _parse_cache.get(key, None) + if cached: + return cached + if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth + clear_cache() + netloc = query = fragment = '' + i = url.find(':') + if i > 0: + if url[:i] == 'http': # optimize the common case + scheme = url[:i].lower() + url = url[i+1:] + if url[:2] == '//': + netloc, url = _splitnetloc(url, 2) + if (('[' in netloc and ']' not in netloc) or + (']' in netloc and '[' not in netloc)): + raise ValueError("Invalid IPv6 URL") + if allow_fragments and '#' in url: + url, fragment = url.split('#', 1) + if '?' in url: + url, query = url.split('?', 1) + v = SplitResult(scheme, netloc, url, query, fragment) + _parse_cache[key] = v + return v + for c in url[:i]: + if c not in scheme_chars: + break + else: + # make sure "url" is not actually a port number (in which case + # "scheme" is really part of the path) + rest = url[i+1:] + if not rest or any(c not in '0123456789' for c in rest): + # not a port number + scheme, url = url[:i].lower(), rest + + if url[:2] == '//': + netloc, url = _splitnetloc(url, 2) + if (('[' in netloc and ']' not in netloc) or + (']' in netloc and '[' not in netloc)): + raise ValueError("Invalid IPv6 URL") + if allow_fragments and '#' in url: + url, fragment = url.split('#', 1) + if '?' in url: + url, query = url.split('?', 1) + v = SplitResult(scheme, netloc, url, query, fragment) + _parse_cache[key] = v + return v + +else: + from gunicorn.six.moves.urllib.parse import urlsplit + + +import inspect + +if hasattr(inspect, 'signature'): + positionals = ( + inspect.Parameter.POSITIONAL_ONLY, + inspect.Parameter.POSITIONAL_OR_KEYWORD, + ) + + def get_arity(f): + sig = inspect.signature(f) + arity = 0 + + for param in sig.parameters.values(): + if param.kind in positionals: + arity += 1 + + return arity +else: + def get_arity(f): + return len(inspect.getargspec(f)[0]) + + +try: + import html + + def html_escape(s): + return html.escape(s) +except ImportError: + import cgi + + def html_escape(s): + return cgi.escape(s, quote=True) diff --git a/py3/lib/python3.6/site-packages/gunicorn/app/__init__.py b/py3/lib/python3.6/site-packages/gunicorn/app/__init__.py new file mode 100644 index 0000000..87f0611 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/app/__init__.py @@ -0,0 +1,4 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. diff --git a/py3/lib/python3.6/site-packages/gunicorn/app/base.py b/py3/lib/python3.6/site-packages/gunicorn/app/base.py new file mode 100644 index 0000000..e468c95 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/app/base.py @@ -0,0 +1,223 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. 
+# See the NOTICE for more information. +from __future__ import print_function + +import os +import sys +import traceback + +from gunicorn._compat import execfile_ +from gunicorn import util +from gunicorn.arbiter import Arbiter +from gunicorn.config import Config, get_default_config_file +from gunicorn import debug + +class BaseApplication(object): + """ + An application interface for configuring and loading + the various necessities for any given web framework. + """ + def __init__(self, usage=None, prog=None): + self.usage = usage + self.cfg = None + self.callable = None + self.prog = prog + self.logger = None + self.do_load_config() + + def do_load_config(self): + """ + Loads the configuration + """ + try: + self.load_default_config() + self.load_config() + except Exception as e: + print("\nError: %s" % str(e), file=sys.stderr) + sys.stderr.flush() + sys.exit(1) + + def load_default_config(self): + # init configuration + self.cfg = Config(self.usage, prog=self.prog) + + def init(self, parser, opts, args): + raise NotImplementedError + + def load(self): + raise NotImplementedError + + def load_config(self): + """ + This method is used to load the configuration from one or several input(s). + Custom Command line, configuration file. + You have to override this method in your class. + """ + raise NotImplementedError + + def reload(self): + self.do_load_config() + if self.cfg.spew: + debug.spew() + + def wsgi(self): + if self.callable is None: + self.callable = self.load() + return self.callable + + def run(self): + try: + Arbiter(self).run() + except RuntimeError as e: + print("\nError: %s\n" % e, file=sys.stderr) + sys.stderr.flush() + sys.exit(1) + + +class Application(BaseApplication): + + # 'init' and 'load' methods are implemented by WSGIApplication. + # pylint: disable=abstract-method + + def chdir(self): + # chdir to the configured path before loading, + # default is the current dir + os.chdir(self.cfg.chdir) + + # add the path to sys.path + if self.cfg.chdir not in sys.path: + sys.path.insert(0, self.cfg.chdir) + + def get_config_from_filename(self, filename): + + if not os.path.exists(filename): + raise RuntimeError("%r doesn't exist" % filename) + + cfg = { + "__builtins__": __builtins__, + "__name__": "__config__", + "__file__": filename, + "__doc__": None, + "__package__": None + } + try: + execfile_(filename, cfg, cfg) + except Exception: + print("Failed to read config file: %s" % filename, file=sys.stderr) + traceback.print_exc() + sys.stderr.flush() + sys.exit(1) + + return cfg + + def get_config_from_module_name(self, module_name): + return vars(util.import_module(module_name)) + + def load_config_from_module_name_or_filename(self, location): + """ + Loads the configuration file: the file is a python file, otherwise raise an RuntimeError + Exception or stop the process if the configuration file contains a syntax error. 
+ """ + + if location.startswith("python:"): + module_name = location[len("python:"):] + cfg = self.get_config_from_module_name(module_name) + else: + if location.startswith("file:"): + filename = location[len("file:"):] + else: + filename = location + cfg = self.get_config_from_filename(filename) + + for k, v in cfg.items(): + # Ignore unknown names + if k not in self.cfg.settings: + continue + try: + self.cfg.set(k.lower(), v) + except: + print("Invalid value for %s: %s\n" % (k, v), file=sys.stderr) + sys.stderr.flush() + raise + + return cfg + + def load_config_from_file(self, filename): + return self.load_config_from_module_name_or_filename(location=filename) + + def load_config(self): + # parse console args + parser = self.cfg.parser() + args = parser.parse_args() + + # optional settings from apps + cfg = self.init(parser, args, args.args) + + # set up import paths and follow symlinks + self.chdir() + + # Load up the any app specific configuration + if cfg: + for k, v in cfg.items(): + self.cfg.set(k.lower(), v) + + env_args = parser.parse_args(self.cfg.get_cmd_args_from_env()) + + if args.config: + self.load_config_from_file(args.config) + elif env_args.config: + self.load_config_from_file(env_args.config) + else: + default_config = get_default_config_file() + if default_config is not None: + self.load_config_from_file(default_config) + + # Load up environment configuration + for k, v in vars(env_args).items(): + if v is None: + continue + if k == "args": + continue + self.cfg.set(k.lower(), v) + + # Lastly, update the configuration with any command line settings. + for k, v in vars(args).items(): + if v is None: + continue + if k == "args": + continue + self.cfg.set(k.lower(), v) + + # current directory might be changed by the config now + # set up import paths and follow symlinks + self.chdir() + + def run(self): + if self.cfg.check_config: + try: + self.load() + except: + msg = "\nError while loading the application:\n" + print(msg, file=sys.stderr) + traceback.print_exc() + sys.stderr.flush() + sys.exit(1) + sys.exit(0) + + if self.cfg.spew: + debug.spew() + + if self.cfg.daemon: + util.daemonize(self.cfg.enable_stdio_inheritance) + + # set python paths + if self.cfg.pythonpath: + paths = self.cfg.pythonpath.split(",") + for path in paths: + pythonpath = os.path.abspath(path) + if pythonpath not in sys.path: + sys.path.insert(0, pythonpath) + + super(Application, self).run() diff --git a/py3/lib/python3.6/site-packages/gunicorn/app/pasterapp.py b/py3/lib/python3.6/site-packages/gunicorn/app/pasterapp.py new file mode 100644 index 0000000..dbcd339 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/app/pasterapp.py @@ -0,0 +1,209 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
+from __future__ import print_function + +# pylint: skip-file + +import os +import pkg_resources +import sys + +try: + import configparser as ConfigParser +except ImportError: + import ConfigParser + +from paste.deploy import loadapp, loadwsgi +SERVER = loadwsgi.SERVER + +from gunicorn.app.base import Application +from gunicorn.config import Config, get_default_config_file +from gunicorn import util + + +def _has_logging_config(paste_file): + cfg_parser = ConfigParser.ConfigParser() + cfg_parser.read([paste_file]) + return cfg_parser.has_section('loggers') + + +def paste_config(gconfig, config_url, relative_to, global_conf=None): + # add entry to pkg_resources + sys.path.insert(0, relative_to) + pkg_resources.working_set.add_entry(relative_to) + + config_url = config_url.split('#')[0] + cx = loadwsgi.loadcontext(SERVER, config_url, relative_to=relative_to, + global_conf=global_conf) + gc, lc = cx.global_conf.copy(), cx.local_conf.copy() + cfg = {} + + host, port = lc.pop('host', ''), lc.pop('port', '') + if host and port: + cfg['bind'] = '%s:%s' % (host, port) + elif host: + cfg['bind'] = host.split(',') + + cfg['default_proc_name'] = gc.get('__file__') + + # init logging configuration + config_file = config_url.split(':')[1] + if _has_logging_config(config_file): + cfg.setdefault('logconfig', config_file) + + for k, v in gc.items(): + if k not in gconfig.settings: + continue + cfg[k] = v + + for k, v in lc.items(): + if k not in gconfig.settings: + continue + cfg[k] = v + + return cfg + + +def load_pasteapp(config_url, relative_to, global_conf=None): + return loadapp(config_url, relative_to=relative_to, + global_conf=global_conf) + +class PasterBaseApplication(Application): + gcfg = None + + def app_config(self): + return paste_config(self.cfg, self.cfgurl, self.relpath, + global_conf=self.gcfg) + + def load_config(self): + super(PasterBaseApplication, self).load_config() + + # reload logging conf + if hasattr(self, "cfgfname"): + parser = ConfigParser.ConfigParser() + parser.read([self.cfgfname]) + if parser.has_section('loggers'): + from logging.config import fileConfig + config_file = os.path.abspath(self.cfgfname) + fileConfig(config_file, dict(__file__=config_file, + here=os.path.dirname(config_file))) + + +class PasterApplication(PasterBaseApplication): + + def init(self, parser, opts, args): + if len(args) != 1: + parser.error("No application name specified.") + + cwd = util.getcwd() + cfgfname = os.path.normpath(os.path.join(cwd, args[0])) + cfgfname = os.path.abspath(cfgfname) + if not os.path.exists(cfgfname): + parser.error("Config file not found: %s" % cfgfname) + + self.cfgurl = 'config:%s' % cfgfname + self.relpath = os.path.dirname(cfgfname) + self.cfgfname = cfgfname + + sys.path.insert(0, self.relpath) + pkg_resources.working_set.add_entry(self.relpath) + + return self.app_config() + + def load(self): + # chdir to the configured path before loading, + # default is the current dir + os.chdir(self.cfg.chdir) + + return load_pasteapp(self.cfgurl, self.relpath, global_conf=self.gcfg) + + +class PasterServerApplication(PasterBaseApplication): + + def __init__(self, app, gcfg=None, host="127.0.0.1", port=None, **kwargs): + # pylint: disable=super-init-not-called + self.cfg = Config() + self.gcfg = gcfg # need to hold this for app_config + self.app = app + self.callable = None + + gcfg = gcfg or {} + cfgfname = gcfg.get("__file__") + if cfgfname is not None: + self.cfgurl = 'config:%s' % cfgfname + self.relpath = os.path.dirname(cfgfname) + self.cfgfname = cfgfname + + cfg = 
kwargs.copy() + + if port and not host.startswith("unix:"): + bind = "%s:%s" % (host, port) + else: + bind = host + cfg["bind"] = bind.split(',') + + if gcfg: + for k, v in gcfg.items(): + cfg[k] = v + cfg["default_proc_name"] = cfg['__file__'] + + try: + for k, v in cfg.items(): + if k.lower() in self.cfg.settings and v is not None: + self.cfg.set(k.lower(), v) + except Exception as e: + print("\nConfig error: %s" % str(e), file=sys.stderr) + sys.stderr.flush() + sys.exit(1) + + if cfg.get("config"): + self.load_config_from_file(cfg["config"]) + else: + default_config = get_default_config_file() + if default_config is not None: + self.load_config_from_file(default_config) + + def load(self): + return self.app + + +def run(): + """\ + The ``gunicorn_paster`` command for launching Paster compatible + applications like Pylons or Turbogears2 + """ + util.warn("""This command is deprecated. + + You should now use the `--paste` option. Ex.: + + gunicorn --paste development.ini + """) + + from gunicorn.app.pasterapp import PasterApplication + PasterApplication("%(prog)s [OPTIONS] pasteconfig.ini").run() + + +def paste_server(app, gcfg=None, host="127.0.0.1", port=None, **kwargs): + """\ + A paster server. + + Then entry point in your paster ini file should looks like this: + + [server:main] + use = egg:gunicorn#main + host = 127.0.0.1 + port = 5000 + + """ + + util.warn("""This command is deprecated. + + You should now use the `--paste` option. Ex.: + + gunicorn --paste development.ini + """) + + from gunicorn.app.pasterapp import PasterServerApplication + PasterServerApplication(app, gcfg=gcfg, host=host, port=port, **kwargs).run() diff --git a/py3/lib/python3.6/site-packages/gunicorn/app/wsgiapp.py b/py3/lib/python3.6/site-packages/gunicorn/app/wsgiapp.py new file mode 100644 index 0000000..2205944 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/app/wsgiapp.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import os + +from gunicorn.errors import ConfigError +from gunicorn.app.base import Application +from gunicorn import util + + +class WSGIApplication(Application): + def init(self, parser, opts, args): + if opts.paste: + app_name = 'main' + path = opts.paste + if '#' in path: + path, app_name = path.split('#') + path = os.path.abspath(os.path.normpath( + os.path.join(util.getcwd(), path))) + + if not os.path.exists(path): + raise ConfigError("%r not found" % path) + + # paste application, load the config + self.cfgurl = 'config:%s#%s' % (path, app_name) + self.relpath = os.path.dirname(path) + + from .pasterapp import paste_config + return paste_config(self.cfg, self.cfgurl, self.relpath) + + if len(args) < 1: + parser.error("No application module specified.") + + self.cfg.set("default_proc_name", args[0]) + self.app_uri = args[0] + + def load_wsgiapp(self): + # load the app + return util.import_app(self.app_uri) + + def load_pasteapp(self): + # load the paste app + from .pasterapp import load_pasteapp + return load_pasteapp(self.cfgurl, self.relpath, global_conf=self.cfg.paste_global_conf) + + def load(self): + if self.cfg.paste is not None: + return self.load_pasteapp() + else: + return self.load_wsgiapp() + + +def run(): + """\ + The ``gunicorn`` command line runner for launching Gunicorn with + generic WSGI applications. 
+ """ + from gunicorn.app.wsgiapp import WSGIApplication + WSGIApplication("%(prog)s [OPTIONS] [APP_MODULE]").run() + + +if __name__ == '__main__': + run() diff --git a/py3/lib/python3.6/site-packages/gunicorn/arbiter.py b/py3/lib/python3.6/site-packages/gunicorn/arbiter.py new file mode 100644 index 0000000..083ee6a --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/arbiter.py @@ -0,0 +1,646 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. +from __future__ import print_function + +import errno +import os +import random +import select +import signal +import sys +import time +import traceback + +from gunicorn.errors import HaltServer, AppImportError +from gunicorn.pidfile import Pidfile +from gunicorn import sock, systemd, util + +from gunicorn import __version__, SERVER_SOFTWARE + + +class Arbiter(object): + """ + Arbiter maintain the workers processes alive. It launches or + kills them if needed. It also manages application reloading + via SIGHUP/USR2. + """ + + # A flag indicating if a worker failed to + # to boot. If a worker process exist with + # this error code, the arbiter will terminate. + WORKER_BOOT_ERROR = 3 + + # A flag indicating if an application failed to be loaded + APP_LOAD_ERROR = 4 + + START_CTX = {} + + LISTENERS = [] + WORKERS = {} + PIPE = [] + + # I love dynamic languages + SIG_QUEUE = [] + SIGNALS = [getattr(signal, "SIG%s" % x) + for x in "HUP QUIT INT TERM TTIN TTOU USR1 USR2 WINCH".split()] + SIG_NAMES = dict( + (getattr(signal, name), name[3:].lower()) for name in dir(signal) + if name[:3] == "SIG" and name[3] != "_" + ) + + def __init__(self, app): + os.environ["SERVER_SOFTWARE"] = SERVER_SOFTWARE + + self._num_workers = None + self._last_logged_active_worker_count = None + self.log = None + + self.setup(app) + + self.pidfile = None + self.systemd = False + self.worker_age = 0 + self.reexec_pid = 0 + self.master_pid = 0 + self.master_name = "Master" + + cwd = util.getcwd() + + args = sys.argv[:] + args.insert(0, sys.executable) + + # init start context + self.START_CTX = { + "args": args, + "cwd": cwd, + 0: sys.executable + } + + def _get_num_workers(self): + return self._num_workers + + def _set_num_workers(self, value): + old_value = self._num_workers + self._num_workers = value + self.cfg.nworkers_changed(self, value, old_value) + num_workers = property(_get_num_workers, _set_num_workers) + + def setup(self, app): + self.app = app + self.cfg = app.cfg + + if self.log is None: + self.log = self.cfg.logger_class(app.cfg) + + # reopen files + if 'GUNICORN_FD' in os.environ: + self.log.reopen_files() + + self.worker_class = self.cfg.worker_class + self.address = self.cfg.address + self.num_workers = self.cfg.workers + self.timeout = self.cfg.timeout + self.proc_name = self.cfg.proc_name + + self.log.debug('Current configuration:\n{0}'.format( + '\n'.join( + ' {0}: {1}'.format(config, value.value) + for config, value + in sorted(self.cfg.settings.items(), + key=lambda setting: setting[1])))) + + # set enviroment' variables + if self.cfg.env: + for k, v in self.cfg.env.items(): + os.environ[k] = v + + if self.cfg.preload_app: + self.app.wsgi() + + def start(self): + """\ + Initialize the arbiter. Start listening and set pidfile if needed. 
+ """ + self.log.info("Starting gunicorn %s", __version__) + + if 'GUNICORN_PID' in os.environ: + self.master_pid = int(os.environ.get('GUNICORN_PID')) + self.proc_name = self.proc_name + ".2" + self.master_name = "Master.2" + + self.pid = os.getpid() + if self.cfg.pidfile is not None: + pidname = self.cfg.pidfile + if self.master_pid != 0: + pidname += ".2" + self.pidfile = Pidfile(pidname) + self.pidfile.create(self.pid) + self.cfg.on_starting(self) + + self.init_signals() + + if not self.LISTENERS: + fds = None + listen_fds = systemd.listen_fds() + if listen_fds: + self.systemd = True + fds = range(systemd.SD_LISTEN_FDS_START, + systemd.SD_LISTEN_FDS_START + listen_fds) + + elif self.master_pid: + fds = [] + for fd in os.environ.pop('GUNICORN_FD').split(','): + fds.append(int(fd)) + + self.LISTENERS = sock.create_sockets(self.cfg, self.log, fds) + + listeners_str = ",".join([str(l) for l in self.LISTENERS]) + self.log.debug("Arbiter booted") + self.log.info("Listening at: %s (%s)", listeners_str, self.pid) + self.log.info("Using worker: %s", self.cfg.worker_class_str) + + # check worker class requirements + if hasattr(self.worker_class, "check_config"): + self.worker_class.check_config(self.cfg, self.log) + + self.cfg.when_ready(self) + + def init_signals(self): + """\ + Initialize master signal handling. Most of the signals + are queued. Child signals only wake up the master. + """ + # close old PIPE + for p in self.PIPE: + os.close(p) + + # initialize the pipe + self.PIPE = pair = os.pipe() + for p in pair: + util.set_non_blocking(p) + util.close_on_exec(p) + + self.log.close_on_exec() + + # initialize all signals + for s in self.SIGNALS: + signal.signal(s, self.signal) + signal.signal(signal.SIGCHLD, self.handle_chld) + + def signal(self, sig, frame): + if len(self.SIG_QUEUE) < 5: + self.SIG_QUEUE.append(sig) + self.wakeup() + + def run(self): + "Main master loop." + self.start() + util._setproctitle("master [%s]" % self.proc_name) + + try: + self.manage_workers() + + while True: + self.maybe_promote_master() + + sig = self.SIG_QUEUE.pop(0) if self.SIG_QUEUE else None + if sig is None: + self.sleep() + self.murder_workers() + self.manage_workers() + continue + + if sig not in self.SIG_NAMES: + self.log.info("Ignoring unknown signal: %s", sig) + continue + + signame = self.SIG_NAMES.get(sig) + handler = getattr(self, "handle_%s" % signame, None) + if not handler: + self.log.error("Unhandled signal: %s", signame) + continue + self.log.info("Handling signal: %s", signame) + handler() + self.wakeup() + except StopIteration: + self.halt() + except KeyboardInterrupt: + self.halt() + except HaltServer as inst: + self.halt(reason=inst.reason, exit_status=inst.exit_status) + except SystemExit: + raise + except Exception: + self.log.info("Unhandled exception in main loop", + exc_info=True) + self.stop(False) + if self.pidfile is not None: + self.pidfile.unlink() + sys.exit(-1) + + def handle_chld(self, sig, frame): + "SIGCHLD handling" + self.reap_workers() + self.wakeup() + + def handle_hup(self): + """\ + HUP handling. 
+ - Reload configuration + - Start the new worker processes with a new configuration + - Gracefully shutdown the old worker processes + """ + self.log.info("Hang up: %s", self.master_name) + self.reload() + + def handle_term(self): + "SIGTERM handling" + raise StopIteration + + def handle_int(self): + "SIGINT handling" + self.stop(False) + raise StopIteration + + def handle_quit(self): + "SIGQUIT handling" + self.stop(False) + raise StopIteration + + def handle_ttin(self): + """\ + SIGTTIN handling. + Increases the number of workers by one. + """ + self.num_workers += 1 + self.manage_workers() + + def handle_ttou(self): + """\ + SIGTTOU handling. + Decreases the number of workers by one. + """ + if self.num_workers <= 1: + return + self.num_workers -= 1 + self.manage_workers() + + def handle_usr1(self): + """\ + SIGUSR1 handling. + Kill all workers by sending them a SIGUSR1 + """ + self.log.reopen_files() + self.kill_workers(signal.SIGUSR1) + + def handle_usr2(self): + """\ + SIGUSR2 handling. + Creates a new master/worker set as a slave of the current + master without affecting old workers. Use this to do live + deployment with the ability to backout a change. + """ + self.reexec() + + def handle_winch(self): + """SIGWINCH handling""" + if self.cfg.daemon: + self.log.info("graceful stop of workers") + self.num_workers = 0 + self.kill_workers(signal.SIGTERM) + else: + self.log.debug("SIGWINCH ignored. Not daemonized") + + def maybe_promote_master(self): + if self.master_pid == 0: + return + + if self.master_pid != os.getppid(): + self.log.info("Master has been promoted.") + # reset master infos + self.master_name = "Master" + self.master_pid = 0 + self.proc_name = self.cfg.proc_name + del os.environ['GUNICORN_PID'] + # rename the pidfile + if self.pidfile is not None: + self.pidfile.rename(self.cfg.pidfile) + # reset proctitle + util._setproctitle("master [%s]" % self.proc_name) + + def wakeup(self): + """\ + Wake up the arbiter by writing to the PIPE + """ + try: + os.write(self.PIPE[1], b'.') + except IOError as e: + if e.errno not in [errno.EAGAIN, errno.EINTR]: + raise + + def halt(self, reason=None, exit_status=0): + """ halt arbiter """ + self.stop() + self.log.info("Shutting down: %s", self.master_name) + if reason is not None: + self.log.info("Reason: %s", reason) + if self.pidfile is not None: + self.pidfile.unlink() + self.cfg.on_exit(self) + sys.exit(exit_status) + + def sleep(self): + """\ + Sleep until PIPE is readable or we timeout. + A readable PIPE means a signal occurred. + """ + try: + ready = select.select([self.PIPE[0]], [], [], 1.0) + if not ready[0]: + return + while os.read(self.PIPE[0], 1): + pass + except (select.error, OSError) as e: + # TODO: select.error is a subclass of OSError since Python 3.3. + error_number = getattr(e, 'errno', e.args[0]) + if error_number not in [errno.EAGAIN, errno.EINTR]: + raise + except KeyboardInterrupt: + sys.exit() + + def stop(self, graceful=True): + """\ + Stop workers + + :attr graceful: boolean, If True (the default) workers will be + killed gracefully (ie. 
trying to wait for the current connection) + """ + + unlink = self.reexec_pid == self.master_pid == 0 and not self.systemd + sock.close_sockets(self.LISTENERS, unlink) + + self.LISTENERS = [] + sig = signal.SIGTERM + if not graceful: + sig = signal.SIGQUIT + limit = time.time() + self.cfg.graceful_timeout + # instruct the workers to exit + self.kill_workers(sig) + # wait until the graceful timeout + while self.WORKERS and time.time() < limit: + time.sleep(0.1) + + self.kill_workers(signal.SIGKILL) + + def reexec(self): + """\ + Relaunch the master and workers. + """ + if self.reexec_pid != 0: + self.log.warning("USR2 signal ignored. Child exists.") + return + + if self.master_pid != 0: + self.log.warning("USR2 signal ignored. Parent exists.") + return + + master_pid = os.getpid() + self.reexec_pid = os.fork() + if self.reexec_pid != 0: + return + + self.cfg.pre_exec(self) + + environ = self.cfg.env_orig.copy() + environ['GUNICORN_PID'] = str(master_pid) + + if self.systemd: + environ['LISTEN_PID'] = str(os.getpid()) + environ['LISTEN_FDS'] = str(len(self.LISTENERS)) + else: + environ['GUNICORN_FD'] = ','.join( + str(l.fileno()) for l in self.LISTENERS) + + os.chdir(self.START_CTX['cwd']) + + # exec the process using the original environment + os.execvpe(self.START_CTX[0], self.START_CTX['args'], environ) + + def reload(self): + old_address = self.cfg.address + + # reset old environment + for k in self.cfg.env: + if k in self.cfg.env_orig: + # reset the key to the value it had before + # we launched gunicorn + os.environ[k] = self.cfg.env_orig[k] + else: + # delete the value set by gunicorn + try: + del os.environ[k] + except KeyError: + pass + + # reload conf + self.app.reload() + self.setup(self.app) + + # reopen log files + self.log.reopen_files() + + # do we need to change listener ? + if old_address != self.cfg.address: + # close all listeners + for l in self.LISTENERS: + l.close() + # init new listeners + self.LISTENERS = sock.create_sockets(self.cfg, self.log) + listeners_str = ",".join([str(l) for l in self.LISTENERS]) + self.log.info("Listening at: %s", listeners_str) + + # do some actions on reload + self.cfg.on_reload(self) + + # unlink pidfile + if self.pidfile is not None: + self.pidfile.unlink() + + # create new pidfile + if self.cfg.pidfile is not None: + self.pidfile = Pidfile(self.cfg.pidfile) + self.pidfile.create(self.pid) + + # set new proc_name + util._setproctitle("master [%s]" % self.proc_name) + + # spawn new workers + for _ in range(self.cfg.workers): + self.spawn_worker() + + # manage workers + self.manage_workers() + + def murder_workers(self): + """\ + Kill unused/idle workers + """ + if not self.timeout: + return + workers = list(self.WORKERS.items()) + for (pid, worker) in workers: + try: + if time.time() - worker.tmp.last_update() <= self.timeout: + continue + except (OSError, ValueError): + continue + + if not worker.aborted: + self.log.critical("WORKER TIMEOUT (pid:%s)", pid) + worker.aborted = True + self.kill_worker(pid, signal.SIGABRT) + else: + self.kill_worker(pid, signal.SIGKILL) + + def reap_workers(self): + """\ + Reap workers to avoid zombie processes + """ + try: + while True: + wpid, status = os.waitpid(-1, os.WNOHANG) + if not wpid: + break + if self.reexec_pid == wpid: + self.reexec_pid = 0 + else: + # A worker was terminated. If the termination reason was + # that it could not boot, we'll shut it down to avoid + # infinite start/stop cycles. + exitcode = status >> 8 + if exitcode == self.WORKER_BOOT_ERROR: + reason = "Worker failed to boot." 
+ raise HaltServer(reason, self.WORKER_BOOT_ERROR) + if exitcode == self.APP_LOAD_ERROR: + reason = "App failed to load." + raise HaltServer(reason, self.APP_LOAD_ERROR) + + worker = self.WORKERS.pop(wpid, None) + if not worker: + continue + worker.tmp.close() + self.cfg.child_exit(self, worker) + except OSError as e: + if e.errno != errno.ECHILD: + raise + + def manage_workers(self): + """\ + Maintain the number of workers by spawning or killing + as required. + """ + if len(self.WORKERS.keys()) < self.num_workers: + self.spawn_workers() + + workers = self.WORKERS.items() + workers = sorted(workers, key=lambda w: w[1].age) + while len(workers) > self.num_workers: + (pid, _) = workers.pop(0) + self.kill_worker(pid, signal.SIGTERM) + + active_worker_count = len(workers) + if self._last_logged_active_worker_count != active_worker_count: + self._last_logged_active_worker_count = active_worker_count + self.log.debug("{0} workers".format(active_worker_count), + extra={"metric": "gunicorn.workers", + "value": active_worker_count, + "mtype": "gauge"}) + + def spawn_worker(self): + self.worker_age += 1 + worker = self.worker_class(self.worker_age, self.pid, self.LISTENERS, + self.app, self.timeout / 2.0, + self.cfg, self.log) + self.cfg.pre_fork(self, worker) + pid = os.fork() + if pid != 0: + worker.pid = pid + self.WORKERS[pid] = worker + return pid + + # Do not inherit the temporary files of other workers + for sibling in self.WORKERS.values(): + sibling.tmp.close() + + # Process Child + worker.pid = os.getpid() + try: + util._setproctitle("worker [%s]" % self.proc_name) + self.log.info("Booting worker with pid: %s", worker.pid) + self.cfg.post_fork(self, worker) + worker.init_process() + sys.exit(0) + except SystemExit: + raise + except AppImportError as e: + self.log.debug("Exception while loading the application", + exc_info=True) + print("%s" % e, file=sys.stderr) + sys.stderr.flush() + sys.exit(self.APP_LOAD_ERROR) + except: + self.log.exception("Exception in worker process") + if not worker.booted: + sys.exit(self.WORKER_BOOT_ERROR) + sys.exit(-1) + finally: + self.log.info("Worker exiting (pid: %s)", worker.pid) + try: + worker.tmp.close() + self.cfg.worker_exit(self, worker) + except: + self.log.warning("Exception during worker exit:\n%s", + traceback.format_exc()) + + def spawn_workers(self): + """\ + Spawn new workers as needed. + + This is where a worker process leaves the main loop + of the master process. + """ + + for _ in range(self.num_workers - len(self.WORKERS.keys())): + self.spawn_worker() + time.sleep(0.1 * random.random()) + + def kill_workers(self, sig): + """\ + Kill all workers with the signal `sig` + :attr sig: `signal.SIG*` value + """ + worker_pids = list(self.WORKERS.keys()) + for pid in worker_pids: + self.kill_worker(pid, sig) + + def kill_worker(self, pid, sig): + """\ + Kill a worker + + :attr pid: int, worker pid + :attr sig: `signal.SIG*` value + """ + try: + os.kill(pid, sig) + except OSError as e: + if e.errno == errno.ESRCH: + try: + worker = self.WORKERS.pop(pid) + worker.tmp.close() + self.cfg.worker_exit(self, worker) + return + except (KeyError, OSError): + return + raise diff --git a/py3/lib/python3.6/site-packages/gunicorn/argparse_compat.py b/py3/lib/python3.6/site-packages/gunicorn/argparse_compat.py new file mode 100644 index 0000000..32d948c --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/argparse_compat.py @@ -0,0 +1,2362 @@ +# Author: Steven J. Bethard . 
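Stepping back to the Arbiter above: its signal handlers let a running master be resized, reloaded or re-executed in place. A small sketch of driving those handlers from Python, assuming the master wrote its pid to gunicorn.pid (the pidfile path is an assumption)::

    import os
    import signal

    master_pid = int(open("gunicorn.pid").read().strip())  # assumed pidfile location

    os.kill(master_pid, signal.SIGTTIN)  # handle_ttin: add one worker
    os.kill(master_pid, signal.SIGTTOU)  # handle_ttou: remove one worker (never below 1)
    os.kill(master_pid, signal.SIGUSR1)  # handle_usr1: reopen log files
    os.kill(master_pid, signal.SIGHUP)   # handle_hup: reload config and cycle the workers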
+ +"""Command-line parsing library + +This module is an optparse-inspired command-line parsing library that: + + - handles both optional and positional arguments + - produces highly informative usage messages + - supports parsers that dispatch to sub-parsers + +The following is a simple usage example that sums integers from the +command-line and writes the result to a file:: + + parser = argparse.ArgumentParser( + description='sum the integers at the command line') + parser.add_argument( + 'integers', metavar='int', nargs='+', type=int, + help='an integer to be summed') + parser.add_argument( + '--log', default=sys.stdout, type=argparse.FileType('w'), + help='the file where the sum should be written') + args = parser.parse_args() + args.log.write('%s' % sum(args.integers)) + args.log.close() + +The module contains the following public classes: + + - ArgumentParser -- The main entry point for command-line parsing. As the + example above shows, the add_argument() method is used to populate + the parser with actions for optional and positional arguments. Then + the parse_args() method is invoked to convert the args at the + command-line into an object with attributes. + + - ArgumentError -- The exception raised by ArgumentParser objects when + there are errors with the parser's actions. Errors raised while + parsing the command-line are caught by ArgumentParser and emitted + as command-line messages. + + - FileType -- A factory for defining types of files to be created. As the + example above shows, instances of FileType are typically passed as + the type= argument of add_argument() calls. + + - Action -- The base class for parser actions. Typically actions are + selected by passing strings like 'store_true' or 'append_const' to + the action= argument of add_argument(). However, for greater + customization of ArgumentParser actions, subclasses of Action may + be defined and passed as the action= argument. + + - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter, + ArgumentDefaultsHelpFormatter -- Formatter classes which + may be passed as the formatter_class= argument to the + ArgumentParser constructor. HelpFormatter is the default, + RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser + not to change the formatting for help text, and + ArgumentDefaultsHelpFormatter adds information about argument defaults + to the help. + +All other classes in this module are considered implementation details. +(Also note that HelpFormatter and RawDescriptionHelpFormatter are only +considered public as object names -- the API of the formatter objects is +still considered an implementation detail.) 
+""" + +__version__ = '1.2.1' +__all__ = [ + 'ArgumentParser', + 'ArgumentError', + 'ArgumentTypeError', + 'FileType', + 'HelpFormatter', + 'ArgumentDefaultsHelpFormatter', + 'RawDescriptionHelpFormatter', + 'RawTextHelpFormatter', + 'Namespace', + 'Action', + 'ONE_OR_MORE', + 'OPTIONAL', + 'PARSER', + 'REMAINDER', + 'SUPPRESS', + 'ZERO_OR_MORE', +] + + +import copy as _copy +import os as _os +import re as _re +import sys as _sys +import textwrap as _textwrap + +from gettext import gettext as _ + +try: + set +except NameError: + # for python < 2.4 compatibility (sets module is there since 2.3): + from sets import Set as set + +try: + basestring +except NameError: + basestring = str + +try: + sorted +except NameError: + # for python < 2.4 compatibility: + def sorted(iterable, reverse=False): + result = list(iterable) + result.sort() + if reverse: + result.reverse() + return result + + +def _callable(obj): + return hasattr(obj, '__call__') or hasattr(obj, '__bases__') + + +SUPPRESS = '==SUPPRESS==' + +OPTIONAL = '?' +ZERO_OR_MORE = '*' +ONE_OR_MORE = '+' +PARSER = 'A...' +REMAINDER = '...' +_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args' + +# ============================= +# Utility functions and classes +# ============================= + +class _AttributeHolder(object): + """Abstract base class that provides __repr__. + + The __repr__ method returns a string in the format:: + ClassName(attr=name, attr=name, ...) + The attributes are determined either by a class-level attribute, + '_kwarg_names', or by inspecting the instance __dict__. + """ + + def __repr__(self): + type_name = type(self).__name__ + arg_strings = [] + for arg in self._get_args(): + arg_strings.append(repr(arg)) + for name, value in self._get_kwargs(): + arg_strings.append('%s=%r' % (name, value)) + return '%s(%s)' % (type_name, ', '.join(arg_strings)) + + def _get_kwargs(self): + return sorted(self.__dict__.items()) + + def _get_args(self): + return [] + + +def _ensure_value(namespace, name, value): + if getattr(namespace, name, None) is None: + setattr(namespace, name, value) + return getattr(namespace, name) + + +# =============== +# Formatting Help +# =============== + +class HelpFormatter(object): + """Formatter for generating usage messages and argument help strings. + + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. + """ + + def __init__(self, + prog, + indent_increment=2, + max_help_position=24, + width=None): + + # default setting for width + if width is None: + try: + width = int(_os.environ['COLUMNS']) + except (KeyError, ValueError): + width = 80 + width -= 2 + + self._prog = prog + self._indent_increment = indent_increment + self._max_help_position = max_help_position + self._width = width + + self._current_indent = 0 + self._level = 0 + self._action_max_length = 0 + + self._root_section = self._Section(self, None) + self._current_section = self._root_section + + self._whitespace_matcher = _re.compile(r'\s+') + self._long_break_matcher = _re.compile(r'\n\n\n+') + + # =============================== + # Section and indentation methods + # =============================== + def _indent(self): + self._current_indent += self._indent_increment + self._level += 1 + + def _dedent(self): + self._current_indent -= self._indent_increment + assert self._current_indent >= 0, 'Indent decreased below 0.' 
+ self._level -= 1 + + class _Section(object): + + def __init__(self, formatter, parent, heading=None): + self.formatter = formatter + self.parent = parent + self.heading = heading + self.items = [] + + def format_help(self): + # format the indented section + if self.parent is not None: + self.formatter._indent() + join = self.formatter._join_parts + for func, args in self.items: + func(*args) + item_help = join([func(*args) for func, args in self.items]) + if self.parent is not None: + self.formatter._dedent() + + # return nothing if the section was empty + if not item_help: + return '' + + # add the heading if the section was non-empty + if self.heading is not SUPPRESS and self.heading is not None: + current_indent = self.formatter._current_indent + heading = '%*s%s:\n' % (current_indent, '', self.heading) + else: + heading = '' + + # join the section-initial newline, the heading and the help + return join(['\n', heading, item_help, '\n']) + + def _add_item(self, func, args): + self._current_section.items.append((func, args)) + + # ======================== + # Message building methods + # ======================== + def start_section(self, heading): + self._indent() + section = self._Section(self, self._current_section, heading) + self._add_item(section.format_help, []) + self._current_section = section + + def end_section(self): + self._current_section = self._current_section.parent + self._dedent() + + def add_text(self, text): + if text is not SUPPRESS and text is not None: + self._add_item(self._format_text, [text]) + + def add_usage(self, usage, actions, groups, prefix=None): + if usage is not SUPPRESS: + args = usage, actions, groups, prefix + self._add_item(self._format_usage, args) + + def add_argument(self, action): + if action.help is not SUPPRESS: + + # find all invocations + get_invocation = self._format_action_invocation + invocations = [get_invocation(action)] + for subaction in self._iter_indented_subactions(action): + invocations.append(get_invocation(subaction)) + + # update the maximum item length + invocation_length = max([len(s) for s in invocations]) + action_length = invocation_length + self._current_indent + self._action_max_length = max(self._action_max_length, + action_length) + + # add the item to the list + self._add_item(self._format_action, [action]) + + def add_arguments(self, actions): + for action in actions: + self.add_argument(action) + + # ======================= + # Help-formatting methods + # ======================= + def format_help(self): + help = self._root_section.format_help() + if help: + help = self._long_break_matcher.sub('\n\n', help) + help = help.strip('\n') + '\n' + return help + + def _join_parts(self, part_strings): + return ''.join([part + for part in part_strings + if part and part is not SUPPRESS]) + + def _format_usage(self, usage, actions, groups, prefix): + if prefix is None: + prefix = _('usage: ') + + # if usage is specified, use that + if usage is not None: + usage = usage % dict(prog=self._prog) + + # if no optionals or positionals are available, usage is just prog + elif usage is None and not actions: + usage = '%(prog)s' % dict(prog=self._prog) + + # if optionals and positionals are available, calculate usage + elif usage is None: + prog = '%(prog)s' % dict(prog=self._prog) + + # split optionals from positionals + optionals = [] + positionals = [] + for action in actions: + if action.option_strings: + optionals.append(action) + else: + positionals.append(action) + + # build full usage string + format = 
self._format_actions_usage + action_usage = format(optionals + positionals, groups) + usage = ' '.join([s for s in [prog, action_usage] if s]) + + # wrap the usage parts if it's too long + text_width = self._width - self._current_indent + if len(prefix) + len(usage) > text_width: + + # break usage into wrappable parts + part_regexp = r'\(.*?\)+|\[.*?\]+|\S+' + opt_usage = format(optionals, groups) + pos_usage = format(positionals, groups) + opt_parts = _re.findall(part_regexp, opt_usage) + pos_parts = _re.findall(part_regexp, pos_usage) + assert ' '.join(opt_parts) == opt_usage + assert ' '.join(pos_parts) == pos_usage + + # helper for wrapping lines + def get_lines(parts, indent, prefix=None): + lines = [] + line = [] + if prefix is not None: + line_len = len(prefix) - 1 + else: + line_len = len(indent) - 1 + for part in parts: + if line_len + 1 + len(part) > text_width: + lines.append(indent + ' '.join(line)) + line = [] + line_len = len(indent) - 1 + line.append(part) + line_len += len(part) + 1 + if line: + lines.append(indent + ' '.join(line)) + if prefix is not None: + lines[0] = lines[0][len(indent):] + return lines + + # if prog is short, follow it with optionals or positionals + if len(prefix) + len(prog) <= 0.75 * text_width: + indent = ' ' * (len(prefix) + len(prog) + 1) + if opt_parts: + lines = get_lines([prog] + opt_parts, indent, prefix) + lines.extend(get_lines(pos_parts, indent)) + elif pos_parts: + lines = get_lines([prog] + pos_parts, indent, prefix) + else: + lines = [prog] + + # if prog is long, put it on its own line + else: + indent = ' ' * len(prefix) + parts = opt_parts + pos_parts + lines = get_lines(parts, indent) + if len(lines) > 1: + lines = [] + lines.extend(get_lines(opt_parts, indent)) + lines.extend(get_lines(pos_parts, indent)) + lines = [prog] + lines + + # join lines into usage + usage = '\n'.join(lines) + + # prefix with 'usage:' + return '%s%s\n\n' % (prefix, usage) + + def _format_actions_usage(self, actions, groups): + # find group indices and identify actions in groups + group_actions = set() + inserts = {} + for group in groups: + try: + start = actions.index(group._group_actions[0]) + except ValueError: + continue + else: + end = start + len(group._group_actions) + if actions[start:end] == group._group_actions: + for action in group._group_actions: + group_actions.add(action) + if not group.required: + if start in inserts: + inserts[start] += ' [' + else: + inserts[start] = '[' + inserts[end] = ']' + else: + if start in inserts: + inserts[start] += ' (' + else: + inserts[start] = '(' + inserts[end] = ')' + for i in range(start + 1, end): + inserts[i] = '|' + + # collect all actions format strings + parts = [] + for i, action in enumerate(actions): + + # suppressed arguments are marked with None + # remove | separators for suppressed arguments + if action.help is SUPPRESS: + parts.append(None) + if inserts.get(i) == '|': + inserts.pop(i) + elif inserts.get(i + 1) == '|': + inserts.pop(i + 1) + + # produce all arg strings + elif not action.option_strings: + part = self._format_args(action, action.dest) + + # if it's in a group, strip the outer [] + if action in group_actions: + if part[0] == '[' and part[-1] == ']': + part = part[1:-1] + + # add the action string to the list + parts.append(part) + + # produce the first way to invoke the option in brackets + else: + option_string = action.option_strings[0] + + # if the Optional doesn't take a value, format is: + # -s or --long + if action.nargs == 0: + part = '%s' % option_string + + # if the 
Optional takes a value, format is: + # -s ARGS or --long ARGS + else: + default = action.dest.upper() + args_string = self._format_args(action, default) + part = '%s %s' % (option_string, args_string) + + # make it look optional if it's not required or in a group + if not action.required and action not in group_actions: + part = '[%s]' % part + + # add the action string to the list + parts.append(part) + + # insert things at the necessary indices + for i in sorted(inserts, reverse=True): + parts[i:i] = [inserts[i]] + + # join all the action items with spaces + text = ' '.join([item for item in parts if item is not None]) + + # clean up separators for mutually exclusive groups + open = r'[\[(]' + close = r'[\])]' + text = _re.sub(r'(%s) ' % open, r'\1', text) + text = _re.sub(r' (%s)' % close, r'\1', text) + text = _re.sub(r'%s *%s' % (open, close), r'', text) + text = _re.sub(r'\(([^|]*)\)', r'\1', text) + text = text.strip() + + # return the text + return text + + def _format_text(self, text): + if '%(prog)' in text: + text = text % dict(prog=self._prog) + text_width = self._width - self._current_indent + indent = ' ' * self._current_indent + return self._fill_text(text, text_width, indent) + '\n\n' + + def _format_action(self, action): + # determine the required width and the entry label + help_position = min(self._action_max_length + 2, + self._max_help_position) + help_width = self._width - help_position + action_width = help_position - self._current_indent - 2 + action_header = self._format_action_invocation(action) + + # ho nelp; start on same line and add a final newline + if not action.help: + tup = self._current_indent, '', action_header + action_header = '%*s%s\n' % tup + + # short action name; start on the same line and pad two spaces + elif len(action_header) <= action_width: + tup = self._current_indent, '', action_width, action_header + action_header = '%*s%-*s ' % tup + indent_first = 0 + + # long action name; start on the next line + else: + tup = self._current_indent, '', action_header + action_header = '%*s%s\n' % tup + indent_first = help_position + + # collect the pieces of the action help + parts = [action_header] + + # if there was help for the action, add lines of help text + if action.help: + help_text = self._expand_help(action) + help_lines = self._split_lines(help_text, help_width) + parts.append('%*s%s\n' % (indent_first, '', help_lines[0])) + for line in help_lines[1:]: + parts.append('%*s%s\n' % (help_position, '', line)) + + # or add a newline if the description doesn't end with one + elif not action_header.endswith('\n'): + parts.append('\n') + + # if there are any sub-actions, add their help as well + for subaction in self._iter_indented_subactions(action): + parts.append(self._format_action(subaction)) + + # return a single string + return self._join_parts(parts) + + def _format_action_invocation(self, action): + if not action.option_strings: + metavar, = self._metavar_formatter(action, action.dest)(1) + return metavar + + else: + parts = [] + + # if the Optional doesn't take a value, format is: + # -s, --long + if action.nargs == 0: + parts.extend(action.option_strings) + + # if the Optional takes a value, format is: + # -s ARGS, --long ARGS + else: + default = action.dest.upper() + args_string = self._format_args(action, default) + for option_string in action.option_strings: + parts.append('%s %s' % (option_string, args_string)) + + return ', '.join(parts) + + def _metavar_formatter(self, action, default_metavar): + if action.metavar is not None: + 
result = action.metavar + elif action.choices is not None: + choice_strs = [str(choice) for choice in action.choices] + result = '{%s}' % ','.join(choice_strs) + else: + result = default_metavar + + def format(tuple_size): + if isinstance(result, tuple): + return result + else: + return (result, ) * tuple_size + return format + + def _format_args(self, action, default_metavar): + get_metavar = self._metavar_formatter(action, default_metavar) + if action.nargs is None: + result = '%s' % get_metavar(1) + elif action.nargs == OPTIONAL: + result = '[%s]' % get_metavar(1) + elif action.nargs == ZERO_OR_MORE: + result = '[%s [%s ...]]' % get_metavar(2) + elif action.nargs == ONE_OR_MORE: + result = '%s [%s ...]' % get_metavar(2) + elif action.nargs == REMAINDER: + result = '...' + elif action.nargs == PARSER: + result = '%s ...' % get_metavar(1) + else: + formats = ['%s' for _ in range(action.nargs)] + result = ' '.join(formats) % get_metavar(action.nargs) + return result + + def _expand_help(self, action): + params = dict(vars(action), prog=self._prog) + for name in list(params): + if params[name] is SUPPRESS: + del params[name] + for name in list(params): + if hasattr(params[name], '__name__'): + params[name] = params[name].__name__ + if params.get('choices') is not None: + choices_str = ', '.join([str(c) for c in params['choices']]) + params['choices'] = choices_str + return self._get_help_string(action) % params + + def _iter_indented_subactions(self, action): + try: + get_subactions = action._get_subactions + except AttributeError: + pass + else: + self._indent() + for subaction in get_subactions(): + yield subaction + self._dedent() + + def _split_lines(self, text, width): + text = self._whitespace_matcher.sub(' ', text).strip() + return _textwrap.wrap(text, width) + + def _fill_text(self, text, width, indent): + text = self._whitespace_matcher.sub(' ', text).strip() + return _textwrap.fill(text, width, initial_indent=indent, + subsequent_indent=indent) + + def _get_help_string(self, action): + return action.help + + +class RawDescriptionHelpFormatter(HelpFormatter): + """Help message formatter which retains any formatting in descriptions. + + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. + """ + + def _fill_text(self, text, width, indent): + return ''.join([indent + line for line in text.splitlines(True)]) + + +class RawTextHelpFormatter(RawDescriptionHelpFormatter): + """Help message formatter which retains formatting of all help text. + + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. + """ + + def _split_lines(self, text, width): + return text.splitlines() + + +class ArgumentDefaultsHelpFormatter(HelpFormatter): + """Help message formatter which adds default values to argument help. + + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. 
+ """ + + def _get_help_string(self, action): + help = action.help + if '%(default)' not in action.help: + if action.default is not SUPPRESS: + defaulting_nargs = [OPTIONAL, ZERO_OR_MORE] + if action.option_strings or action.nargs in defaulting_nargs: + help += ' (default: %(default)s)' + return help + + +# ===================== +# Options and Arguments +# ===================== + +def _get_action_name(argument): + if argument is None: + return None + elif argument.option_strings: + return '/'.join(argument.option_strings) + elif argument.metavar not in (None, SUPPRESS): + return argument.metavar + elif argument.dest not in (None, SUPPRESS): + return argument.dest + else: + return None + + +class ArgumentError(Exception): + """An error from creating or using an argument (optional or positional). + + The string value of this exception is the message, augmented with + information about the argument that caused it. + """ + + def __init__(self, argument, message): + self.argument_name = _get_action_name(argument) + self.message = message + + def __str__(self): + if self.argument_name is None: + format = '%(message)s' + else: + format = 'argument %(argument_name)s: %(message)s' + return format % dict(message=self.message, + argument_name=self.argument_name) + + +class ArgumentTypeError(Exception): + """An error from trying to convert a command line string to a type.""" + pass + + +# ============== +# Action classes +# ============== + +class Action(_AttributeHolder): + """Information about how to convert command line strings to Python objects. + + Action objects are used by an ArgumentParser to represent the information + needed to parse a single argument from one or more strings from the + command line. The keyword arguments to the Action constructor are also + all attributes of Action instances. + + Keyword Arguments: + + - option_strings -- A list of command-line option strings which + should be associated with this action. + + - dest -- The name of the attribute to hold the created object(s) + + - nargs -- The number of command-line arguments that should be + consumed. By default, one argument will be consumed and a single + value will be produced. Other values include: + - N (an integer) consumes N arguments (and produces a list) + - '?' consumes zero or one arguments + - '*' consumes zero or more arguments (and produces a list) + - '+' consumes one or more arguments (and produces a list) + Note that the difference between the default and nargs=1 is that + with the default, a single value will be produced, while with + nargs=1, a list containing a single value will be produced. + + - const -- The value to be produced if the option is specified and the + option uses an action that takes no values. + + - default -- The value to be produced if the option is not specified. + + - type -- The type which the command-line arguments should be converted + to, should be one of 'string', 'int', 'float', 'complex' or a + callable object that accepts a single string argument. If None, + 'string' is assumed. + + - choices -- A container of values that should be allowed. If not None, + after a command-line argument has been converted to the appropriate + type, an exception will be raised if it is not a member of this + collection. + + - required -- True if the action must always be specified at the + command line. This is only meaningful for optional command-line + arguments. + + - help -- The help string describing the argument. 
+ + - metavar -- The name to be used for the option's argument with the + help string. If None, the 'dest' value will be used as the name. + """ + + def __init__(self, + option_strings, + dest, + nargs=None, + const=None, + default=None, + type=None, + choices=None, + required=False, + help=None, + metavar=None): + self.option_strings = option_strings + self.dest = dest + self.nargs = nargs + self.const = const + self.default = default + self.type = type + self.choices = choices + self.required = required + self.help = help + self.metavar = metavar + + def _get_kwargs(self): + names = [ + 'option_strings', + 'dest', + 'nargs', + 'const', + 'default', + 'type', + 'choices', + 'help', + 'metavar', + ] + return [(name, getattr(self, name)) for name in names] + + def __call__(self, parser, namespace, values, option_string=None): + raise NotImplementedError(_('.__call__() not defined')) + + +class _StoreAction(Action): + + def __init__(self, + option_strings, + dest, + nargs=None, + const=None, + default=None, + type=None, + choices=None, + required=False, + help=None, + metavar=None): + if nargs == 0: + raise ValueError('nargs for store actions must be > 0; if you ' + 'have nothing to store, actions such as store ' + 'true or store const may be more appropriate') + if const is not None and nargs != OPTIONAL: + raise ValueError('nargs must be %r to supply const' % OPTIONAL) + super(_StoreAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=nargs, + const=const, + default=default, + type=type, + choices=choices, + required=required, + help=help, + metavar=metavar) + + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, values) + + +class _StoreConstAction(Action): + + def __init__(self, + option_strings, + dest, + const, + default=None, + required=False, + help=None, + metavar=None): + super(_StoreConstAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + const=const, + default=default, + required=required, + help=help) + + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, self.const) + + +class _StoreTrueAction(_StoreConstAction): + + def __init__(self, + option_strings, + dest, + default=False, + required=False, + help=None): + super(_StoreTrueAction, self).__init__( + option_strings=option_strings, + dest=dest, + const=True, + default=default, + required=required, + help=help) + + +class _StoreFalseAction(_StoreConstAction): + + def __init__(self, + option_strings, + dest, + default=True, + required=False, + help=None): + super(_StoreFalseAction, self).__init__( + option_strings=option_strings, + dest=dest, + const=False, + default=default, + required=required, + help=help) + + +class _AppendAction(Action): + + def __init__(self, + option_strings, + dest, + nargs=None, + const=None, + default=None, + type=None, + choices=None, + required=False, + help=None, + metavar=None): + if nargs == 0: + raise ValueError('nargs for append actions must be > 0; if arg ' + 'strings are not supplying the value to append, ' + 'the append const action may be more appropriate') + if const is not None and nargs != OPTIONAL: + raise ValueError('nargs must be %r to supply const' % OPTIONAL) + super(_AppendAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=nargs, + const=const, + default=default, + type=type, + choices=choices, + required=required, + help=help, + metavar=metavar) + + def __call__(self, parser, namespace, values, 
option_string=None): + items = _copy.copy(_ensure_value(namespace, self.dest, [])) + items.append(values) + setattr(namespace, self.dest, items) + + +class _AppendConstAction(Action): + + def __init__(self, + option_strings, + dest, + const, + default=None, + required=False, + help=None, + metavar=None): + super(_AppendConstAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + const=const, + default=default, + required=required, + help=help, + metavar=metavar) + + def __call__(self, parser, namespace, values, option_string=None): + items = _copy.copy(_ensure_value(namespace, self.dest, [])) + items.append(self.const) + setattr(namespace, self.dest, items) + + +class _CountAction(Action): + + def __init__(self, + option_strings, + dest, + default=None, + required=False, + help=None): + super(_CountAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + default=default, + required=required, + help=help) + + def __call__(self, parser, namespace, values, option_string=None): + new_count = _ensure_value(namespace, self.dest, 0) + 1 + setattr(namespace, self.dest, new_count) + + +class _HelpAction(Action): + + def __init__(self, + option_strings, + dest=SUPPRESS, + default=SUPPRESS, + help=None): + super(_HelpAction, self).__init__( + option_strings=option_strings, + dest=dest, + default=default, + nargs=0, + help=help) + + def __call__(self, parser, namespace, values, option_string=None): + parser.print_help() + parser.exit() + + +class _VersionAction(Action): + + def __init__(self, + option_strings, + version=None, + dest=SUPPRESS, + default=SUPPRESS, + help="show program's version number and exit"): + super(_VersionAction, self).__init__( + option_strings=option_strings, + dest=dest, + default=default, + nargs=0, + help=help) + self.version = version + + def __call__(self, parser, namespace, values, option_string=None): + version = self.version + if version is None: + version = parser.version + formatter = parser._get_formatter() + formatter.add_text(version) + parser.exit(message=formatter.format_help()) + + +class _SubParsersAction(Action): + + class _ChoicesPseudoAction(Action): + + def __init__(self, name, help): + sup = super(_SubParsersAction._ChoicesPseudoAction, self) + sup.__init__(option_strings=[], dest=name, help=help) + + def __init__(self, + option_strings, + prog, + parser_class, + dest=SUPPRESS, + help=None, + metavar=None): + + self._prog_prefix = prog + self._parser_class = parser_class + self._name_parser_map = {} + self._choices_actions = [] + + super(_SubParsersAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=PARSER, + choices=self._name_parser_map, + help=help, + metavar=metavar) + + def add_parser(self, name, **kwargs): + # set prog from the existing prefix + if kwargs.get('prog') is None: + kwargs['prog'] = '%s %s' % (self._prog_prefix, name) + + # create a pseudo-action to hold the choice help + if 'help' in kwargs: + help = kwargs.pop('help') + choice_action = self._ChoicesPseudoAction(name, help) + self._choices_actions.append(choice_action) + + # create the parser and add it to the map + parser = self._parser_class(**kwargs) + self._name_parser_map[name] = parser + return parser + + def _get_subactions(self): + return self._choices_actions + + def __call__(self, parser, namespace, values, option_string=None): + parser_name = values[0] + arg_strings = values[1:] + + # set the parser name if requested + if self.dest is not SUPPRESS: + setattr(namespace, self.dest, parser_name) + + # 
select the parser + try: + parser = self._name_parser_map[parser_name] + except KeyError: + tup = parser_name, ', '.join(self._name_parser_map) + msg = _('unknown parser %r (choices: %s)' % tup) + raise ArgumentError(self, msg) + + # parse all the remaining options into the namespace + # store any unrecognized options on the object, so that the top + # level parser can decide what to do with them + namespace, arg_strings = parser.parse_known_args(arg_strings, namespace) + if arg_strings: + vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, []) + getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings) + + +# ============== +# Type classes +# ============== + +class FileType(object): + """Factory for creating file object types + + Instances of FileType are typically passed as type= arguments to the + ArgumentParser add_argument() method. + + Keyword Arguments: + - mode -- A string indicating how the file is to be opened. Accepts the + same values as the builtin open() function. + - bufsize -- The file's desired buffer size. Accepts the same values as + the builtin open() function. + """ + + def __init__(self, mode='r', bufsize=None): + self._mode = mode + self._bufsize = bufsize + + def __call__(self, string): + # the special argument "-" means sys.std{in,out} + if string == '-': + if 'r' in self._mode: + return _sys.stdin + elif 'w' in self._mode: + return _sys.stdout + else: + msg = _('argument "-" with mode %r' % self._mode) + raise ValueError(msg) + + # all other arguments are used as file names + if self._bufsize: + return open(string, self._mode, self._bufsize) + else: + return open(string, self._mode) + + def __repr__(self): + args = [self._mode, self._bufsize] + args_str = ', '.join([repr(arg) for arg in args if arg is not None]) + return '%s(%s)' % (type(self).__name__, args_str) + +# =========================== +# Optional and Positional Parsing +# =========================== + +class Namespace(_AttributeHolder): + """Simple object for storing attributes. + + Implements equality by attribute names and values, and provides a simple + string representation. 
+ """ + + def __init__(self, **kwargs): + for name in kwargs: + setattr(self, name, kwargs[name]) + + __hash__ = None + + def __eq__(self, other): + return vars(self) == vars(other) + + def __ne__(self, other): + return not (self == other) + + def __contains__(self, key): + return key in self.__dict__ + + +class _ActionsContainer(object): + + def __init__(self, + description, + prefix_chars, + argument_default, + conflict_handler): + super(_ActionsContainer, self).__init__() + + self.description = description + self.argument_default = argument_default + self.prefix_chars = prefix_chars + self.conflict_handler = conflict_handler + + # set up registries + self._registries = {} + + # register actions + self.register('action', None, _StoreAction) + self.register('action', 'store', _StoreAction) + self.register('action', 'store_const', _StoreConstAction) + self.register('action', 'store_true', _StoreTrueAction) + self.register('action', 'store_false', _StoreFalseAction) + self.register('action', 'append', _AppendAction) + self.register('action', 'append_const', _AppendConstAction) + self.register('action', 'count', _CountAction) + self.register('action', 'help', _HelpAction) + self.register('action', 'version', _VersionAction) + self.register('action', 'parsers', _SubParsersAction) + + # raise an exception if the conflict handler is invalid + self._get_handler() + + # action storage + self._actions = [] + self._option_string_actions = {} + + # groups + self._action_groups = [] + self._mutually_exclusive_groups = [] + + # defaults storage + self._defaults = {} + + # determines whether an "option" looks like a negative number + self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$') + + # whether or not there are any optionals that look like negative + # numbers -- uses a list so it can be shared and edited + self._has_negative_number_optionals = [] + + # ==================== + # Registration methods + # ==================== + def register(self, registry_name, value, object): + registry = self._registries.setdefault(registry_name, {}) + registry[value] = object + + def _registry_get(self, registry_name, value, default=None): + return self._registries[registry_name].get(value, default) + + # ================================== + # Namespace default accessor methods + # ================================== + def set_defaults(self, **kwargs): + self._defaults.update(kwargs) + + # if these defaults match any existing arguments, replace + # the previous default on the object with the new one + for action in self._actions: + if action.dest in kwargs: + action.default = kwargs[action.dest] + + def get_default(self, dest): + for action in self._actions: + if action.dest == dest and action.default is not None: + return action.default + return self._defaults.get(dest, None) + + + # ======================= + # Adding argument actions + # ======================= + def add_argument(self, *args, **kwargs): + """ + add_argument(dest, ..., name=value, ...) + add_argument(option_string, option_string, ..., name=value, ...) 
+ """ + + # if no positional args are supplied or only one is supplied and + # it doesn't look like an option string, parse a positional + # argument + chars = self.prefix_chars + if not args or len(args) == 1 and args[0][0] not in chars: + if args and 'dest' in kwargs: + raise ValueError('dest supplied twice for positional argument') + kwargs = self._get_positional_kwargs(*args, **kwargs) + + # otherwise, we're adding an optional argument + else: + kwargs = self._get_optional_kwargs(*args, **kwargs) + + # if no default was supplied, use the parser-level default + if 'default' not in kwargs: + dest = kwargs['dest'] + if dest in self._defaults: + kwargs['default'] = self._defaults[dest] + elif self.argument_default is not None: + kwargs['default'] = self.argument_default + + # create the action object, and add it to the parser + action_class = self._pop_action_class(kwargs) + if not _callable(action_class): + raise ValueError('unknown action "%s"' % action_class) + action = action_class(**kwargs) + + # raise an error if the action type is not callable + type_func = self._registry_get('type', action.type, action.type) + if not _callable(type_func): + raise ValueError('%r is not callable' % type_func) + + return self._add_action(action) + + def add_argument_group(self, *args, **kwargs): + group = _ArgumentGroup(self, *args, **kwargs) + self._action_groups.append(group) + return group + + def add_mutually_exclusive_group(self, **kwargs): + group = _MutuallyExclusiveGroup(self, **kwargs) + self._mutually_exclusive_groups.append(group) + return group + + def _add_action(self, action): + # resolve any conflicts + self._check_conflict(action) + + # add to actions list + self._actions.append(action) + action.container = self + + # index the action by any option strings it has + for option_string in action.option_strings: + self._option_string_actions[option_string] = action + + # set the flag if any option strings look like negative numbers + for option_string in action.option_strings: + if self._negative_number_matcher.match(option_string): + if not self._has_negative_number_optionals: + self._has_negative_number_optionals.append(True) + + # return the created action + return action + + def _remove_action(self, action): + self._actions.remove(action) + + def _add_container_actions(self, container): + # collect groups by titles + title_group_map = {} + for group in self._action_groups: + if group.title in title_group_map: + msg = _('cannot merge actions - two groups are named %r') + raise ValueError(msg % (group.title)) + title_group_map[group.title] = group + + # map each action to its group + group_map = {} + for group in container._action_groups: + + # if a group with the title exists, use that, otherwise + # create a new group matching the container's group + if group.title not in title_group_map: + title_group_map[group.title] = self.add_argument_group( + title=group.title, + description=group.description, + conflict_handler=group.conflict_handler) + + # map the actions to their new group + for action in group._group_actions: + group_map[action] = title_group_map[group.title] + + # add container's mutually exclusive groups + # NOTE: if add_mutually_exclusive_group ever gains title= and + # description= then this code will need to be expanded as above + for group in container._mutually_exclusive_groups: + mutex_group = self.add_mutually_exclusive_group( + required=group.required) + + # map the actions to their new mutex group + for action in group._group_actions: + group_map[action] = 
mutex_group + + # add all actions to this container or their group + for action in container._actions: + group_map.get(action, self)._add_action(action) + + def _get_positional_kwargs(self, dest, **kwargs): + # make sure required is not specified + if 'required' in kwargs: + msg = _("'required' is an invalid argument for positionals") + raise TypeError(msg) + + # mark positional arguments as required if at least one is + # always required + if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]: + kwargs['required'] = True + if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs: + kwargs['required'] = True + + # return the keyword arguments with no option strings + return dict(kwargs, dest=dest, option_strings=[]) + + def _get_optional_kwargs(self, *args, **kwargs): + # determine short and long option strings + option_strings = [] + long_option_strings = [] + for option_string in args: + # error on strings that don't start with an appropriate prefix + if not option_string[0] in self.prefix_chars: + msg = _('invalid option string %r: ' + 'must start with a character %r') + tup = option_string, self.prefix_chars + raise ValueError(msg % tup) + + # strings starting with two prefix characters are long options + option_strings.append(option_string) + if option_string[0] in self.prefix_chars: + if len(option_string) > 1: + if option_string[1] in self.prefix_chars: + long_option_strings.append(option_string) + + # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x' + dest = kwargs.pop('dest', None) + if dest is None: + if long_option_strings: + dest_option_string = long_option_strings[0] + else: + dest_option_string = option_strings[0] + dest = dest_option_string.lstrip(self.prefix_chars) + if not dest: + msg = _('dest= is required for options like %r') + raise ValueError(msg % option_string) + dest = dest.replace('-', '_') + + # return the updated keyword arguments + return dict(kwargs, dest=dest, option_strings=option_strings) + + def _pop_action_class(self, kwargs, default=None): + action = kwargs.pop('action', default) + return self._registry_get('action', action, action) + + def _get_handler(self): + # determine function from conflict handler string + handler_func_name = '_handle_conflict_%s' % self.conflict_handler + try: + return getattr(self, handler_func_name) + except AttributeError: + msg = _('invalid conflict_resolution value: %r') + raise ValueError(msg % self.conflict_handler) + + def _check_conflict(self, action): + + # find all options that conflict with this option + confl_optionals = [] + for option_string in action.option_strings: + if option_string in self._option_string_actions: + confl_optional = self._option_string_actions[option_string] + confl_optionals.append((option_string, confl_optional)) + + # resolve any conflicts + if confl_optionals: + conflict_handler = self._get_handler() + conflict_handler(action, confl_optionals) + + def _handle_conflict_error(self, action, conflicting_actions): + message = _('conflicting option string(s): %s') + conflict_string = ', '.join([option_string + for option_string, action + in conflicting_actions]) + raise ArgumentError(action, message % conflict_string) + + def _handle_conflict_resolve(self, action, conflicting_actions): + + # remove all conflicting options + for option_string, action in conflicting_actions: + + # remove the conflicting option + action.option_strings.remove(option_string) + self._option_string_actions.pop(option_string, None) + + # if the option now has no option string, remove it from the + # 
container holding it + if not action.option_strings: + action.container._remove_action(action) + + +class _ArgumentGroup(_ActionsContainer): + + def __init__(self, container, title=None, description=None, **kwargs): + # add any missing keyword arguments by checking the container + update = kwargs.setdefault + update('conflict_handler', container.conflict_handler) + update('prefix_chars', container.prefix_chars) + update('argument_default', container.argument_default) + super_init = super(_ArgumentGroup, self).__init__ + super_init(description=description, **kwargs) + + # group attributes + self.title = title + self._group_actions = [] + + # share most attributes with the container + self._registries = container._registries + self._actions = container._actions + self._option_string_actions = container._option_string_actions + self._defaults = container._defaults + self._has_negative_number_optionals = \ + container._has_negative_number_optionals + + def _add_action(self, action): + action = super(_ArgumentGroup, self)._add_action(action) + self._group_actions.append(action) + return action + + def _remove_action(self, action): + super(_ArgumentGroup, self)._remove_action(action) + self._group_actions.remove(action) + + +class _MutuallyExclusiveGroup(_ArgumentGroup): + + def __init__(self, container, required=False): + super(_MutuallyExclusiveGroup, self).__init__(container) + self.required = required + self._container = container + + def _add_action(self, action): + if action.required: + msg = _('mutually exclusive arguments must be optional') + raise ValueError(msg) + action = self._container._add_action(action) + self._group_actions.append(action) + return action + + def _remove_action(self, action): + self._container._remove_action(action) + self._group_actions.remove(action) + + +class ArgumentParser(_AttributeHolder, _ActionsContainer): + """Object for parsing command line strings into Python objects. + + Keyword Arguments: + - prog -- The name of the program (default: sys.argv[0]) + - usage -- A usage message (default: auto-generated from arguments) + - description -- A description of what the program does + - epilog -- Text following the argument descriptions + - parents -- Parsers whose arguments should be copied into this one + - formatter_class -- HelpFormatter class for printing help messages + - prefix_chars -- Characters that prefix optional arguments + - fromfile_prefix_chars -- Characters that prefix files containing + additional arguments + - argument_default -- The default value for all arguments + - conflict_handler -- String indicating how to handle conflicts + - add_help -- Add a -h/-help option + """ + + def __init__(self, + prog=None, + usage=None, + description=None, + epilog=None, + version=None, + parents=[], + formatter_class=HelpFormatter, + prefix_chars='-', + fromfile_prefix_chars=None, + argument_default=None, + conflict_handler='error', + add_help=True): + + if version is not None: + import warnings + warnings.warn( + """The "version" argument to ArgumentParser is deprecated. 
""" + """Please use """ + """"add_argument(..., action='version', version="N", ...)" """ + """instead""", DeprecationWarning) + + superinit = super(ArgumentParser, self).__init__ + superinit(description=description, + prefix_chars=prefix_chars, + argument_default=argument_default, + conflict_handler=conflict_handler) + + # default setting for prog + if prog is None: + prog = _os.path.basename(_sys.argv[0]) + + self.prog = prog + self.usage = usage + self.epilog = epilog + self.version = version + self.formatter_class = formatter_class + self.fromfile_prefix_chars = fromfile_prefix_chars + self.add_help = add_help + + add_group = self.add_argument_group + self._positionals = add_group(_('positional arguments')) + self._optionals = add_group(_('optional arguments')) + self._subparsers = None + + # register types + def identity(string): + return string + self.register('type', None, identity) + + # add help and version arguments if necessary + # (using explicit default to override global argument_default) + if '-' in prefix_chars: + default_prefix = '-' + else: + default_prefix = prefix_chars[0] + if self.add_help: + self.add_argument( + default_prefix+'h', default_prefix*2+'help', + action='help', default=SUPPRESS, + help=_('show this help message and exit')) + if self.version: + self.add_argument( + default_prefix+'v', default_prefix*2+'version', + action='version', default=SUPPRESS, + version=self.version, + help=_("show program's version number and exit")) + + # add parent arguments and defaults + for parent in parents: + self._add_container_actions(parent) + try: + defaults = parent._defaults + except AttributeError: + pass + else: + self._defaults.update(defaults) + + # ======================= + # Pretty __repr__ methods + # ======================= + def _get_kwargs(self): + names = [ + 'prog', + 'usage', + 'description', + 'version', + 'formatter_class', + 'conflict_handler', + 'add_help', + ] + return [(name, getattr(self, name)) for name in names] + + # ================================== + # Optional/Positional adding methods + # ================================== + def add_subparsers(self, **kwargs): + if self._subparsers is not None: + self.error(_('cannot have multiple subparser arguments')) + + # add the parser class to the arguments if it's not present + kwargs.setdefault('parser_class', type(self)) + + if 'title' in kwargs or 'description' in kwargs: + title = _(kwargs.pop('title', 'subcommands')) + description = _(kwargs.pop('description', None)) + self._subparsers = self.add_argument_group(title, description) + else: + self._subparsers = self._positionals + + # prog defaults to the usage message of this parser, skipping + # optional arguments and with no "usage:" prefix + if kwargs.get('prog') is None: + formatter = self._get_formatter() + positionals = self._get_positional_actions() + groups = self._mutually_exclusive_groups + formatter.add_usage(self.usage, positionals, groups, '') + kwargs['prog'] = formatter.format_help().strip() + + # create the parsers action and add it to the positionals list + parsers_class = self._pop_action_class(kwargs, 'parsers') + action = parsers_class(option_strings=[], **kwargs) + self._subparsers._add_action(action) + + # return the created parsers action + return action + + def _add_action(self, action): + if action.option_strings: + self._optionals._add_action(action) + else: + self._positionals._add_action(action) + return action + + def _get_optional_actions(self): + return [action + for action in self._actions + if action.option_strings] 
+ + def _get_positional_actions(self): + return [action + for action in self._actions + if not action.option_strings] + + # ===================================== + # Command line argument parsing methods + # ===================================== + def parse_args(self, args=None, namespace=None): + args, argv = self.parse_known_args(args, namespace) + if argv: + msg = _('unrecognized arguments: %s') + self.error(msg % ' '.join(argv)) + return args + + def parse_known_args(self, args=None, namespace=None): + # args default to the system args + if args is None: + args = _sys.argv[1:] + + # default Namespace built from parser defaults + if namespace is None: + namespace = Namespace() + + # add any action defaults that aren't present + for action in self._actions: + if action.dest is not SUPPRESS: + if not hasattr(namespace, action.dest): + if action.default is not SUPPRESS: + default = action.default + if isinstance(action.default, basestring): + default = self._get_value(action, default) + setattr(namespace, action.dest, default) + + # add any parser defaults that aren't present + for dest in self._defaults: + if not hasattr(namespace, dest): + setattr(namespace, dest, self._defaults[dest]) + + # parse the arguments and exit if there are any errors + try: + namespace, args = self._parse_known_args(args, namespace) + if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR): + args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR)) + delattr(namespace, _UNRECOGNIZED_ARGS_ATTR) + return namespace, args + except ArgumentError: + err = _sys.exc_info()[1] + self.error(str(err)) + + def _parse_known_args(self, arg_strings, namespace): + # replace arg strings that are file references + if self.fromfile_prefix_chars is not None: + arg_strings = self._read_args_from_files(arg_strings) + + # map all mutually exclusive arguments to the other arguments + # they can't occur with + action_conflicts = {} + for mutex_group in self._mutually_exclusive_groups: + group_actions = mutex_group._group_actions + for i, mutex_action in enumerate(mutex_group._group_actions): + conflicts = action_conflicts.setdefault(mutex_action, []) + conflicts.extend(group_actions[:i]) + conflicts.extend(group_actions[i + 1:]) + + # find all option indices, and determine the arg_string_pattern + # which has an 'O' if there is an option at an index, + # an 'A' if there is an argument, or a '-' if there is a '--' + option_string_indices = {} + arg_string_pattern_parts = [] + arg_strings_iter = iter(arg_strings) + for i, arg_string in enumerate(arg_strings_iter): + + # all args after -- are non-options + if arg_string == '--': + arg_string_pattern_parts.append('-') + for arg_string in arg_strings_iter: + arg_string_pattern_parts.append('A') + + # otherwise, add the arg to the arg strings + # and note the index if it was an option + else: + option_tuple = self._parse_optional(arg_string) + if option_tuple is None: + pattern = 'A' + else: + option_string_indices[i] = option_tuple + pattern = 'O' + arg_string_pattern_parts.append(pattern) + + # join the pieces together to form the pattern + arg_strings_pattern = ''.join(arg_string_pattern_parts) + + # converts arg strings to the appropriate and then takes the action + seen_actions = set() + seen_non_default_actions = set() + + def take_action(action, argument_strings, option_string=None): + seen_actions.add(action) + argument_values = self._get_values(action, argument_strings) + + # error if this argument is not allowed with other previously + # seen arguments, assuming that actions that use the 
default + # value don't really count as "present" + if argument_values is not action.default: + seen_non_default_actions.add(action) + for conflict_action in action_conflicts.get(action, []): + if conflict_action in seen_non_default_actions: + msg = _('not allowed with argument %s') + action_name = _get_action_name(conflict_action) + raise ArgumentError(action, msg % action_name) + + # take the action if we didn't receive a SUPPRESS value + # (e.g. from a default) + if argument_values is not SUPPRESS: + action(self, namespace, argument_values, option_string) + + # function to convert arg_strings into an optional action + def consume_optional(start_index): + + # get the optional identified at this index + option_tuple = option_string_indices[start_index] + action, option_string, explicit_arg = option_tuple + + # identify additional optionals in the same arg string + # (e.g. -xyz is the same as -x -y -z if no args are required) + match_argument = self._match_argument + action_tuples = [] + while True: + + # if we found no optional action, skip it + if action is None: + extras.append(arg_strings[start_index]) + return start_index + 1 + + # if there is an explicit argument, try to match the + # optional's string arguments to only this + if explicit_arg is not None: + arg_count = match_argument(action, 'A') + + # if the action is a single-dash option and takes no + # arguments, try to parse more single-dash options out + # of the tail of the option string + chars = self.prefix_chars + if arg_count == 0 and option_string[1] not in chars: + action_tuples.append((action, [], option_string)) + char = option_string[0] + option_string = char + explicit_arg[0] + new_explicit_arg = explicit_arg[1:] or None + optionals_map = self._option_string_actions + if option_string in optionals_map: + action = optionals_map[option_string] + explicit_arg = new_explicit_arg + else: + msg = _('ignored explicit argument %r') + raise ArgumentError(action, msg % explicit_arg) + + # if the action expect exactly one argument, we've + # successfully matched the option; exit the loop + elif arg_count == 1: + stop = start_index + 1 + args = [explicit_arg] + action_tuples.append((action, args, option_string)) + break + + # error if a double-dash option did not use the + # explicit argument + else: + msg = _('ignored explicit argument %r') + raise ArgumentError(action, msg % explicit_arg) + + # if there is no explicit argument, try to match the + # optional's string arguments with the following strings + # if successful, exit the loop + else: + start = start_index + 1 + selected_patterns = arg_strings_pattern[start:] + arg_count = match_argument(action, selected_patterns) + stop = start + arg_count + args = arg_strings[start:stop] + action_tuples.append((action, args, option_string)) + break + + # add the Optional to the list and return the index at which + # the Optional's string args stopped + assert action_tuples + for action, args, option_string in action_tuples: + take_action(action, args, option_string) + return stop + + # the list of Positionals left to be parsed; this is modified + # by consume_positionals() + positionals = self._get_positional_actions() + + # function to convert arg_strings into positional actions + def consume_positionals(start_index): + # match as many Positionals as possible + match_partial = self._match_arguments_partial + selected_pattern = arg_strings_pattern[start_index:] + arg_counts = match_partial(positionals, selected_pattern) + + # slice off the appropriate arg strings for each Positional 
+            # and add the Positional and its args to the list
+            for action, arg_count in zip(positionals, arg_counts):
+                args = arg_strings[start_index: start_index + arg_count]
+                start_index += arg_count
+                take_action(action, args)
+
+            # slice off the Positionals that we just parsed and return the
+            # index at which the Positionals' string args stopped
+            positionals[:] = positionals[len(arg_counts):]
+            return start_index
+
+        # consume Positionals and Optionals alternately, until we have
+        # passed the last option string
+        extras = []
+        start_index = 0
+        if option_string_indices:
+            max_option_string_index = max(option_string_indices)
+        else:
+            max_option_string_index = -1
+        while start_index <= max_option_string_index:
+
+            # consume any Positionals preceding the next option
+            next_option_string_index = min([
+                index
+                for index in option_string_indices
+                if index >= start_index])
+            if start_index != next_option_string_index:
+                positionals_end_index = consume_positionals(start_index)
+
+                # only try to parse the next optional if we didn't consume
+                # the option string during the positionals parsing
+                if positionals_end_index > start_index:
+                    start_index = positionals_end_index
+                    continue
+                else:
+                    start_index = positionals_end_index
+
+            # if we consumed all the positionals we could and we're not
+            # at the index of an option string, there were extra arguments
+            if start_index not in option_string_indices:
+                strings = arg_strings[start_index:next_option_string_index]
+                extras.extend(strings)
+                start_index = next_option_string_index
+
+            # consume the next optional and any arguments for it
+            start_index = consume_optional(start_index)
+
+        # consume any positionals following the last Optional
+        stop_index = consume_positionals(start_index)
+
+        # if we didn't consume all the argument strings, there were extras
+        extras.extend(arg_strings[stop_index:])
+
+        # if we didn't use all the Positional objects, there were too few
+        # arg strings supplied.
+ if positionals: + self.error(_('too few arguments')) + + # make sure all required actions were present + for action in self._actions: + if action.required: + if action not in seen_actions: + name = _get_action_name(action) + self.error(_('argument %s is required') % name) + + # make sure all required groups had one option present + for group in self._mutually_exclusive_groups: + if group.required: + for action in group._group_actions: + if action in seen_non_default_actions: + break + + # if no actions were used, report the error + else: + names = [_get_action_name(action) + for action in group._group_actions + if action.help is not SUPPRESS] + msg = _('one of the arguments %s is required') + self.error(msg % ' '.join(names)) + + # return the updated namespace and the extra arguments + return namespace, extras + + def _read_args_from_files(self, arg_strings): + # expand arguments referencing files + new_arg_strings = [] + for arg_string in arg_strings: + + # for regular arguments, just add them back into the list + if arg_string[0] not in self.fromfile_prefix_chars: + new_arg_strings.append(arg_string) + + # replace arguments referencing files with the file content + else: + try: + args_file = open(arg_string[1:]) + try: + arg_strings = [] + for arg_line in args_file.read().splitlines(): + for arg in self.convert_arg_line_to_args(arg_line): + arg_strings.append(arg) + arg_strings = self._read_args_from_files(arg_strings) + new_arg_strings.extend(arg_strings) + finally: + args_file.close() + except IOError: + err = _sys.exc_info()[1] + self.error(str(err)) + + # return the modified argument list + return new_arg_strings + + def convert_arg_line_to_args(self, arg_line): + return [arg_line] + + def _match_argument(self, action, arg_strings_pattern): + # match the pattern for this action to the arg strings + nargs_pattern = self._get_nargs_pattern(action) + match = _re.match(nargs_pattern, arg_strings_pattern) + + # raise an exception if we weren't able to find a match + if match is None: + nargs_errors = { + None: _('expected one argument'), + OPTIONAL: _('expected at most one argument'), + ONE_OR_MORE: _('expected at least one argument'), + } + default = _('expected %s argument(s)') % action.nargs + msg = nargs_errors.get(action.nargs, default) + raise ArgumentError(action, msg) + + # return the number of arguments matched + return len(match.group(1)) + + def _match_arguments_partial(self, actions, arg_strings_pattern): + # progressively shorten the actions list by slicing off the + # final actions until we find a match + result = [] + for i in range(len(actions), 0, -1): + actions_slice = actions[:i] + pattern = ''.join([self._get_nargs_pattern(action) + for action in actions_slice]) + match = _re.match(pattern, arg_strings_pattern) + if match is not None: + result.extend([len(string) for string in match.groups()]) + break + + # return the list of arg string counts + return result + + def _parse_optional(self, arg_string): + # if it's an empty string, it was meant to be a positional + if not arg_string: + return None + + # if it doesn't start with a prefix, it was meant to be positional + if not arg_string[0] in self.prefix_chars: + return None + + # if the option string is present in the parser, return the action + if arg_string in self._option_string_actions: + action = self._option_string_actions[arg_string] + return action, arg_string, None + + # if it's just a single character, it was meant to be positional + if len(arg_string) == 1: + return None + + # if the option string before 
the "=" is present, return the action + if '=' in arg_string: + option_string, explicit_arg = arg_string.split('=', 1) + if option_string in self._option_string_actions: + action = self._option_string_actions[option_string] + return action, option_string, explicit_arg + + # search through all possible prefixes of the option string + # and all actions in the parser for possible interpretations + option_tuples = self._get_option_tuples(arg_string) + + # if multiple actions match, the option string was ambiguous + if len(option_tuples) > 1: + options = ', '.join([option_string + for action, option_string, explicit_arg in option_tuples]) + tup = arg_string, options + self.error(_('ambiguous option: %s could match %s') % tup) + + # if exactly one action matched, this segmentation is good, + # so return the parsed action + elif len(option_tuples) == 1: + option_tuple, = option_tuples + return option_tuple + + # if it was not found as an option, but it looks like a negative + # number, it was meant to be positional + # unless there are negative-number-like options + if self._negative_number_matcher.match(arg_string): + if not self._has_negative_number_optionals: + return None + + # if it contains a space, it was meant to be a positional + if ' ' in arg_string: + return None + + # it was meant to be an optional but there is no such option + # in this parser (though it might be a valid option in a subparser) + return None, arg_string, None + + def _get_option_tuples(self, option_string): + result = [] + + # option strings starting with two prefix characters are only + # split at the '=' + chars = self.prefix_chars + if option_string[0] in chars and option_string[1] in chars: + if '=' in option_string: + option_prefix, explicit_arg = option_string.split('=', 1) + else: + option_prefix = option_string + explicit_arg = None + for option_string in self._option_string_actions: + if option_string.startswith(option_prefix): + action = self._option_string_actions[option_string] + tup = action, option_string, explicit_arg + result.append(tup) + + # single character options can be concatenated with their arguments + # but multiple character options always have to have their argument + # separate + elif option_string[0] in chars and option_string[1] not in chars: + option_prefix = option_string + explicit_arg = None + short_option_prefix = option_string[:2] + short_explicit_arg = option_string[2:] + + for option_string in self._option_string_actions: + if option_string == short_option_prefix: + action = self._option_string_actions[option_string] + tup = action, option_string, short_explicit_arg + result.append(tup) + elif option_string.startswith(option_prefix): + action = self._option_string_actions[option_string] + tup = action, option_string, explicit_arg + result.append(tup) + + # shouldn't ever get here + else: + self.error(_('unexpected option string: %s') % option_string) + + # return the collected option tuples + return result + + def _get_nargs_pattern(self, action): + # in all examples below, we have to allow for '--' args + # which are represented as '-' in the pattern + nargs = action.nargs + + # the default (None) is assumed to be a single argument + if nargs is None: + nargs_pattern = '(-*A-*)' + + # allow zero or one arguments + elif nargs == OPTIONAL: + nargs_pattern = '(-*A?-*)' + + # allow zero or more arguments + elif nargs == ZERO_OR_MORE: + nargs_pattern = '(-*[A-]*)' + + # allow one or more arguments + elif nargs == ONE_OR_MORE: + nargs_pattern = '(-*A[A-]*)' + + # allow any number of 
options or arguments + elif nargs == REMAINDER: + nargs_pattern = '([-AO]*)' + + # allow one argument followed by any number of options or arguments + elif nargs == PARSER: + nargs_pattern = '(-*A[-AO]*)' + + # all others should be integers + else: + nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs) + + # if this is an optional action, -- is not allowed + if action.option_strings: + nargs_pattern = nargs_pattern.replace('-*', '') + nargs_pattern = nargs_pattern.replace('-', '') + + # return the pattern + return nargs_pattern + + # ======================== + # Value conversion methods + # ======================== + def _get_values(self, action, arg_strings): + # for everything but PARSER args, strip out '--' + if action.nargs not in [PARSER, REMAINDER]: + arg_strings = [s for s in arg_strings if s != '--'] + + # optional argument produces a default when not present + if not arg_strings and action.nargs == OPTIONAL: + if action.option_strings: + value = action.const + else: + value = action.default + if isinstance(value, basestring): + value = self._get_value(action, value) + self._check_value(action, value) + + # when nargs='*' on a positional, if there were no command-line + # args, use the default if it is anything other than None + elif (not arg_strings and action.nargs == ZERO_OR_MORE and + not action.option_strings): + if action.default is not None: + value = action.default + else: + value = arg_strings + self._check_value(action, value) + + # single argument or optional argument produces a single value + elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]: + arg_string, = arg_strings + value = self._get_value(action, arg_string) + self._check_value(action, value) + + # REMAINDER arguments convert all values, checking none + elif action.nargs == REMAINDER: + value = [self._get_value(action, v) for v in arg_strings] + + # PARSER arguments convert all values, but check only the first + elif action.nargs == PARSER: + value = [self._get_value(action, v) for v in arg_strings] + self._check_value(action, value[0]) + + # all other types of nargs produce a list + else: + value = [self._get_value(action, v) for v in arg_strings] + for v in value: + self._check_value(action, v) + + # return the converted value + return value + + def _get_value(self, action, arg_string): + type_func = self._registry_get('type', action.type, action.type) + if not _callable(type_func): + msg = _('%r is not callable') + raise ArgumentError(action, msg % type_func) + + # convert the value to the appropriate type + try: + result = type_func(arg_string) + + # ArgumentTypeErrors indicate errors + except ArgumentTypeError: + name = getattr(action.type, '__name__', repr(action.type)) + msg = str(_sys.exc_info()[1]) + raise ArgumentError(action, msg) + + # TypeErrors or ValueErrors also indicate errors + except (TypeError, ValueError): + name = getattr(action.type, '__name__', repr(action.type)) + msg = _('invalid %s value: %r') + raise ArgumentError(action, msg % (name, arg_string)) + + # return the converted value + return result + + def _check_value(self, action, value): + # converted value must be one of the choices (if specified) + if action.choices is not None and value not in action.choices: + tup = value, ', '.join(map(repr, action.choices)) + msg = _('invalid choice: %r (choose from %s)') % tup + raise ArgumentError(action, msg) + + # ======================= + # Help-formatting methods + # ======================= + def format_usage(self): + formatter = self._get_formatter() + 
formatter.add_usage(self.usage, self._actions, + self._mutually_exclusive_groups) + return formatter.format_help() + + def format_help(self): + formatter = self._get_formatter() + + # usage + formatter.add_usage(self.usage, self._actions, + self._mutually_exclusive_groups) + + # description + formatter.add_text(self.description) + + # positionals, optionals and user-defined groups + for action_group in self._action_groups: + formatter.start_section(action_group.title) + formatter.add_text(action_group.description) + formatter.add_arguments(action_group._group_actions) + formatter.end_section() + + # epilog + formatter.add_text(self.epilog) + + # determine help from format above + return formatter.format_help() + + def format_version(self): + import warnings + warnings.warn( + 'The format_version method is deprecated -- the "version" ' + 'argument to ArgumentParser is no longer supported.', + DeprecationWarning) + formatter = self._get_formatter() + formatter.add_text(self.version) + return formatter.format_help() + + def _get_formatter(self): + return self.formatter_class(prog=self.prog) + + # ===================== + # Help-printing methods + # ===================== + def print_usage(self, file=None): + if file is None: + file = _sys.stdout + self._print_message(self.format_usage(), file) + + def print_help(self, file=None): + if file is None: + file = _sys.stdout + self._print_message(self.format_help(), file) + + def print_version(self, file=None): + import warnings + warnings.warn( + 'The print_version method is deprecated -- the "version" ' + 'argument to ArgumentParser is no longer supported.', + DeprecationWarning) + self._print_message(self.format_version(), file) + + def _print_message(self, message, file=None): + if message: + if file is None: + file = _sys.stderr + file.write(message) + + # =============== + # Exiting methods + # =============== + def exit(self, status=0, message=None): + if message: + self._print_message(message, _sys.stderr) + _sys.exit(status) + + def error(self, message): + """error(message: string) + + Prints a usage message incorporating the message to stderr and + exits. + + If you override this in a subclass, it should not return -- it + should either exit or raise an exception. + """ + self.print_usage(_sys.stderr) + self.exit(2, _('%s: error: %s\n') % (self.prog, message)) diff --git a/py3/lib/python3.6/site-packages/gunicorn/config.py b/py3/lib/python3.6/site-packages/gunicorn/config.py new file mode 100644 index 0000000..aa97894 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/config.py @@ -0,0 +1,1950 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +# Please remember to run "make -C docs html" after update "desc" attributes. + +import copy +import grp +import inspect +try: + import argparse +except ImportError: # python 2.6 + from . 
import argparse_compat as argparse +import os +import pwd +import re +import ssl +import sys +import textwrap +import shlex + +from gunicorn import __version__ +from gunicorn import _compat +from gunicorn.errors import ConfigError +from gunicorn.reloader import reloader_engines +from gunicorn import six +from gunicorn import util + +KNOWN_SETTINGS = [] +PLATFORM = sys.platform + + +def make_settings(ignore=None): + settings = {} + ignore = ignore or () + for s in KNOWN_SETTINGS: + setting = s() + if setting.name in ignore: + continue + settings[setting.name] = setting.copy() + return settings + + +def auto_int(_, x): + # for compatible with octal numbers in python3 + if re.match(r'0(\d)', x, re.IGNORECASE): + x = x.replace('0', '0o', 1) + return int(x, 0) + + +class Config(object): + + def __init__(self, usage=None, prog=None): + self.settings = make_settings() + self.usage = usage + self.prog = prog or os.path.basename(sys.argv[0]) + self.env_orig = os.environ.copy() + + def __getattr__(self, name): + if name not in self.settings: + raise AttributeError("No configuration setting for: %s" % name) + return self.settings[name].get() + + def __setattr__(self, name, value): + if name != "settings" and name in self.settings: + raise AttributeError("Invalid access!") + super(Config, self).__setattr__(name, value) + + def set(self, name, value): + if name not in self.settings: + raise AttributeError("No configuration setting for: %s" % name) + self.settings[name].set(value) + + def get_cmd_args_from_env(self): + if 'GUNICORN_CMD_ARGS' in self.env_orig: + return shlex.split(self.env_orig['GUNICORN_CMD_ARGS']) + return [] + + def parser(self): + kwargs = { + "usage": self.usage, + "prog": self.prog + } + parser = argparse.ArgumentParser(**kwargs) + parser.add_argument("-v", "--version", + action="version", default=argparse.SUPPRESS, + version="%(prog)s (version " + __version__ + ")\n", + help="show program's version number and exit") + parser.add_argument("args", nargs="*", help=argparse.SUPPRESS) + + keys = sorted(self.settings, key=self.settings.__getitem__) + for k in keys: + self.settings[k].add_option(parser) + + return parser + + @property + def worker_class_str(self): + uri = self.settings['worker_class'].get() + + ## are we using a threaded worker? + is_sync = uri.endswith('SyncWorker') or uri == 'sync' + if is_sync and self.threads > 1: + return "threads" + return uri + + @property + def worker_class(self): + uri = self.settings['worker_class'].get() + + ## are we using a threaded worker? 
+ is_sync = uri.endswith('SyncWorker') or uri == 'sync' + if is_sync and self.threads > 1: + uri = "gunicorn.workers.gthread.ThreadWorker" + + worker_class = util.load_class(uri) + if hasattr(worker_class, "setup"): + worker_class.setup() + return worker_class + + @property + def address(self): + s = self.settings['bind'].get() + return [util.parse_address(_compat.bytes_to_str(bind)) for bind in s] + + @property + def uid(self): + return self.settings['user'].get() + + @property + def gid(self): + return self.settings['group'].get() + + @property + def proc_name(self): + pn = self.settings['proc_name'].get() + if pn is not None: + return pn + else: + return self.settings['default_proc_name'].get() + + @property + def logger_class(self): + uri = self.settings['logger_class'].get() + if uri == "simple": + # support the default + uri = LoggerClass.default + + # if default logger is in use, and statsd is on, automagically switch + # to the statsd logger + if uri == LoggerClass.default: + if 'statsd_host' in self.settings and self.settings['statsd_host'].value is not None: + uri = "gunicorn.instrument.statsd.Statsd" + + logger_class = util.load_class( + uri, + default="gunicorn.glogging.Logger", + section="gunicorn.loggers") + + if hasattr(logger_class, "install"): + logger_class.install() + return logger_class + + @property + def is_ssl(self): + return self.certfile or self.keyfile + + @property + def ssl_options(self): + opts = {} + for name, value in self.settings.items(): + if value.section == 'SSL': + opts[name] = value.get() + return opts + + @property + def env(self): + raw_env = self.settings['raw_env'].get() + env = {} + + if not raw_env: + return env + + for e in raw_env: + s = _compat.bytes_to_str(e) + try: + k, v = s.split('=', 1) + except ValueError: + raise RuntimeError("environment setting %r invalid" % s) + + env[k] = v + + return env + + @property + def sendfile(self): + if self.settings['sendfile'].get() is not None: + return False + + if 'SENDFILE' in os.environ: + sendfile = os.environ['SENDFILE'].lower() + return sendfile in ['y', '1', 'yes', 'true'] + + return True + + @property + def reuse_port(self): + return self.settings['reuse_port'].get() + + @property + def paste_global_conf(self): + raw_global_conf = self.settings['raw_paste_global_conf'].get() + if raw_global_conf is None: + return None + + global_conf = {} + for e in raw_global_conf: + s = _compat.bytes_to_str(e) + try: + k, v = re.split(r'(?= 0.9.7 (or install it via + ``pip install gunicorn[eventlet]``) + * ``gevent`` - Requires gevent >= 0.13 (or install it via + ``pip install gunicorn[gevent]``) + * ``tornado`` - Requires tornado >= 0.2 (or install it via + ``pip install gunicorn[tornado]``) + * ``gthread`` - Python 2 requires the futures package to be installed + (or install it via ``pip install gunicorn[gthread]``) + * ``gaiohttp`` - Deprecated. + + Optionally, you can provide your own worker by giving Gunicorn a + Python path to a subclass of ``gunicorn.workers.base.Worker``. + This alternative syntax will load the gevent class: + ``gunicorn.workers.ggevent.GeventWorker``. + + .. deprecated:: 19.8 + The ``gaiohttp`` worker is deprecated. Please use + ``aiohttp.worker.GunicornWebWorker`` instead. See + :ref:`asyncio-workers` for more information on how to use it. 
+ """ + +class WorkerThreads(Setting): + name = "threads" + section = "Worker Processes" + cli = ["--threads"] + meta = "INT" + validator = validate_pos_int + type = int + default = 1 + desc = """\ + The number of worker threads for handling requests. + + Run each worker with the specified number of threads. + + A positive integer generally in the ``2-4 x $(NUM_CORES)`` range. + You'll want to vary this a bit to find the best for your particular + application's work load. + + If it is not defined, the default is ``1``. + + This setting only affects the Gthread worker type. + + .. note:: + If you try to use the ``sync`` worker type and set the ``threads`` + setting to more than 1, the ``gthread`` worker type will be used + instead. + """ + + +class WorkerConnections(Setting): + name = "worker_connections" + section = "Worker Processes" + cli = ["--worker-connections"] + meta = "INT" + validator = validate_pos_int + type = int + default = 1000 + desc = """\ + The maximum number of simultaneous clients. + + This setting only affects the Eventlet and Gevent worker types. + """ + + +class MaxRequests(Setting): + name = "max_requests" + section = "Worker Processes" + cli = ["--max-requests"] + meta = "INT" + validator = validate_pos_int + type = int + default = 0 + desc = """\ + The maximum number of requests a worker will process before restarting. + + Any value greater than zero will limit the number of requests a work + will process before automatically restarting. This is a simple method + to help limit the damage of memory leaks. + + If this is set to zero (the default) then the automatic worker + restarts are disabled. + """ + + +class MaxRequestsJitter(Setting): + name = "max_requests_jitter" + section = "Worker Processes" + cli = ["--max-requests-jitter"] + meta = "INT" + validator = validate_pos_int + type = int + default = 0 + desc = """\ + The maximum jitter to add to the *max_requests* setting. + + The jitter causes the restart per worker to be randomized by + ``randint(0, max_requests_jitter)``. This is intended to stagger worker + restarts to avoid all workers restarting at the same time. + + .. versionadded:: 19.2 + """ + + +class Timeout(Setting): + name = "timeout" + section = "Worker Processes" + cli = ["-t", "--timeout"] + meta = "INT" + validator = validate_pos_int + type = int + default = 30 + desc = """\ + Workers silent for more than this many seconds are killed and restarted. + + Generally set to thirty seconds. Only set this noticeably higher if + you're sure of the repercussions for sync workers. For the non sync + workers it just means that the worker process is still communicating and + is not tied to the length of time required to handle a single request. + """ + + +class GracefulTimeout(Setting): + name = "graceful_timeout" + section = "Worker Processes" + cli = ["--graceful-timeout"] + meta = "INT" + validator = validate_pos_int + type = int + default = 30 + desc = """\ + Timeout for graceful workers restart. + + After receiving a restart signal, workers have this much time to finish + serving requests. Workers still alive after the timeout (starting from + the receipt of the restart signal) are force killed. + """ + + +class Keepalive(Setting): + name = "keepalive" + section = "Worker Processes" + cli = ["--keep-alive"] + meta = "INT" + validator = validate_pos_int + type = int + default = 2 + desc = """\ + The number of seconds to wait for requests on a Keep-Alive connection. 
+ + Generally set in the 1-5 seconds range for servers with direct connection + to the client (e.g. when you don't have separate load balancer). When + Gunicorn is deployed behind a load balancer, it often makes sense to + set this to a higher value. + + .. note:: + ``sync`` worker does not support persistent connections and will + ignore this option. + """ + + +class LimitRequestLine(Setting): + name = "limit_request_line" + section = "Security" + cli = ["--limit-request-line"] + meta = "INT" + validator = validate_pos_int + type = int + default = 4094 + desc = """\ + The maximum size of HTTP request line in bytes. + + This parameter is used to limit the allowed size of a client's + HTTP request-line. Since the request-line consists of the HTTP + method, URI, and protocol version, this directive places a + restriction on the length of a request-URI allowed for a request + on the server. A server needs this value to be large enough to + hold any of its resource names, including any information that + might be passed in the query part of a GET request. Value is a number + from 0 (unlimited) to 8190. + + This parameter can be used to prevent any DDOS attack. + """ + + +class LimitRequestFields(Setting): + name = "limit_request_fields" + section = "Security" + cli = ["--limit-request-fields"] + meta = "INT" + validator = validate_pos_int + type = int + default = 100 + desc = """\ + Limit the number of HTTP headers fields in a request. + + This parameter is used to limit the number of headers in a request to + prevent DDOS attack. Used with the *limit_request_field_size* it allows + more safety. By default this value is 100 and can't be larger than + 32768. + """ + + +class LimitRequestFieldSize(Setting): + name = "limit_request_field_size" + section = "Security" + cli = ["--limit-request-field_size"] + meta = "INT" + validator = validate_pos_int + type = int + default = 8190 + desc = """\ + Limit the allowed size of an HTTP request header field. + + Value is a positive number or 0. Setting it to 0 will allow unlimited + header field sizes. + + .. warning:: + Setting this parameter to a very high or unlimited value can open + up for DDOS attacks. + """ + + +class Reload(Setting): + name = "reload" + section = 'Debugging' + cli = ['--reload'] + validator = validate_bool + action = 'store_true' + default = False + + desc = '''\ + Restart workers when code changes. + + This setting is intended for development. It will cause workers to be + restarted whenever application code changes. + + The reloader is incompatible with application preloading. When using a + paste configuration be sure that the server block does not import any + application code or the reload will not work as designed. + + The default behavior is to attempt inotify with a fallback to file + system polling. Generally, inotify should be preferred if available + because it consumes less system resources. + + .. note:: + In order to use the inotify reloader, you must have the ``inotify`` + package installed. + ''' + + +class ReloadEngine(Setting): + name = "reload_engine" + section = "Debugging" + cli = ["--reload-engine"] + meta = "STRING" + validator = validate_reload_engine + default = "auto" + desc = """\ + The implementation that should be used to power :ref:`reload`. + + Valid engines are: + + * 'auto' + * 'poll' + * 'inotify' (requires inotify) + + .. 
versionadded:: 19.7 + """ + + +class ReloadExtraFiles(Setting): + name = "reload_extra_files" + action = "append" + section = "Debugging" + cli = ["--reload-extra-file"] + meta = "FILES" + validator = validate_list_of_existing_files + default = [] + desc = """\ + Extends :ref:`reload` option to also watch and reload on additional files + (e.g., templates, configurations, specifications, etc.). + + .. versionadded:: 19.8 + """ + + +class Spew(Setting): + name = "spew" + section = "Debugging" + cli = ["--spew"] + validator = validate_bool + action = "store_true" + default = False + desc = """\ + Install a trace function that spews every line executed by the server. + + This is the nuclear option. + """ + + +class ConfigCheck(Setting): + name = "check_config" + section = "Debugging" + cli = ["--check-config"] + validator = validate_bool + action = "store_true" + default = False + desc = """\ + Check the configuration. + """ + + +class PreloadApp(Setting): + name = "preload_app" + section = "Server Mechanics" + cli = ["--preload"] + validator = validate_bool + action = "store_true" + default = False + desc = """\ + Load application code before the worker processes are forked. + + By preloading an application you can save some RAM resources as well as + speed up server boot times. Although, if you defer application loading + to each worker process, you can reload your application code easily by + restarting workers. + """ + + +class Sendfile(Setting): + name = "sendfile" + section = "Server Mechanics" + cli = ["--no-sendfile"] + validator = validate_bool + action = "store_const" + const = False + + desc = """\ + Disables the use of ``sendfile()``. + + If not set, the value of the ``SENDFILE`` environment variable is used + to enable or disable its usage. + + .. versionadded:: 19.2 + .. versionchanged:: 19.4 + Swapped ``--sendfile`` with ``--no-sendfile`` to actually allow + disabling. + .. versionchanged:: 19.6 + added support for the ``SENDFILE`` environment variable + """ + + +class ReusePort(Setting): + name = "reuse_port" + section = "Server Mechanics" + cli = ["--reuse-port"] + validator = validate_bool + action = "store_true" + default = False + + desc = """\ + Set the ``SO_REUSEPORT`` flag on the listening socket. + + .. versionadded:: 19.8 + """ + + +class Chdir(Setting): + name = "chdir" + section = "Server Mechanics" + cli = ["--chdir"] + validator = validate_chdir + default = util.getcwd() + desc = """\ + Chdir to specified directory before apps loading. + """ + + +class Daemon(Setting): + name = "daemon" + section = "Server Mechanics" + cli = ["-D", "--daemon"] + validator = validate_bool + action = "store_true" + default = False + desc = """\ + Daemonize the Gunicorn process. + + Detaches the server from the controlling terminal and enters the + background. + """ + +class Env(Setting): + name = "raw_env" + action = "append" + section = "Server Mechanics" + cli = ["-e", "--env"] + meta = "ENV" + validator = validate_list_string + default = [] + + desc = """\ + Set environment variable (key=value). + + Pass variables to the execution environment. Ex.:: + + $ gunicorn -b 127.0.0.1:8000 --env FOO=1 test:app + + and test for the foo variable environment in your application. + """ + + +class Pidfile(Setting): + name = "pidfile" + section = "Server Mechanics" + cli = ["-p", "--pid"] + meta = "FILE" + validator = validate_string + default = None + desc = """\ + A filename to use for the PID file. + + If not set, no PID file will be written. 
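Several of the server-mechanics settings in this stretch (chdir, daemon, pidfile, preload_app, raw_env) map directly onto names in the same kind of config module. A small sketch, with all paths and variables hypothetical:

# gunicorn.conf.py -- hypothetical deployment values
chdir = "/srv/myapp"                        # change into this directory before loading the app
daemon = False                              # stay in the foreground (e.g. under systemd)
pidfile = "/run/myapp/gunicorn.pid"         # leave unset to skip writing a PID file
preload_app = True                          # import the app once in the master before forking
raw_env = ["APP_ENV=production", "FOO=1"]   # key=value pairs, split on the first '='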
+ """ + +class WorkerTmpDir(Setting): + name = "worker_tmp_dir" + section = "Server Mechanics" + cli = ["--worker-tmp-dir"] + meta = "DIR" + validator = validate_string + default = None + desc = """\ + A directory to use for the worker heartbeat temporary file. + + If not set, the default temporary directory will be used. + + .. note:: + The current heartbeat system involves calling ``os.fchmod`` on + temporary file handlers and may block a worker for arbitrary time + if the directory is on a disk-backed filesystem. + + See :ref:`blocking-os-fchmod` for more detailed information + and a solution for avoiding this problem. + """ + + +class User(Setting): + name = "user" + section = "Server Mechanics" + cli = ["-u", "--user"] + meta = "USER" + validator = validate_user + default = os.geteuid() + desc = """\ + Switch worker processes to run as this user. + + A valid user id (as an integer) or the name of a user that can be + retrieved with a call to ``pwd.getpwnam(value)`` or ``None`` to not + change the worker process user. + """ + + +class Group(Setting): + name = "group" + section = "Server Mechanics" + cli = ["-g", "--group"] + meta = "GROUP" + validator = validate_group + default = os.getegid() + desc = """\ + Switch worker process to run as this group. + + A valid group id (as an integer) or the name of a user that can be + retrieved with a call to ``pwd.getgrnam(value)`` or ``None`` to not + change the worker processes group. + """ + +class Umask(Setting): + name = "umask" + section = "Server Mechanics" + cli = ["-m", "--umask"] + meta = "INT" + validator = validate_pos_int + type = auto_int + default = 0 + desc = """\ + A bit mask for the file mode on files written by Gunicorn. + + Note that this affects unix socket permissions. + + A valid value for the ``os.umask(mode)`` call or a string compatible + with ``int(value, 0)`` (``0`` means Python guesses the base, so values + like ``0``, ``0xFF``, ``0022`` are valid for decimal, hex, and octal + representations) + """ + + +class Initgroups(Setting): + name = "initgroups" + section = "Server Mechanics" + cli = ["--initgroups"] + validator = validate_bool + action = 'store_true' + default = False + + desc = """\ + If true, set the worker process's group access list with all of the + groups of which the specified username is a member, plus the specified + group id. + + .. versionadded:: 19.7 + """ + + +class TmpUploadDir(Setting): + name = "tmp_upload_dir" + section = "Server Mechanics" + meta = "DIR" + validator = validate_string + default = None + desc = """\ + Directory to store temporary request data as they are read. + + This may disappear in the near future. + + This path should be writable by the process permissions set for Gunicorn + workers. If not specified, Gunicorn will choose a system generated + temporary directory. + """ + + +class SecureSchemeHeader(Setting): + name = "secure_scheme_headers" + section = "Server Mechanics" + validator = validate_dict + default = { + "X-FORWARDED-PROTOCOL": "ssl", + "X-FORWARDED-PROTO": "https", + "X-FORWARDED-SSL": "on" + } + desc = """\ + + A dictionary containing headers and values that the front-end proxy + uses to indicate HTTPS requests. These tell Gunicorn to set + ``wsgi.url_scheme`` to ``https``, so your application can tell that the + request is secure. + + The dictionary should map upper-case header names to exact string + values. 
The value comparisons are case-sensitive, unlike the header + names, so make sure they're exactly what your front-end proxy sends + when handling HTTPS requests. + + It is important that your front-end proxy configuration ensures that + the headers defined here can not be passed directly from the client. + """ + + +class ForwardedAllowIPS(Setting): + name = "forwarded_allow_ips" + section = "Server Mechanics" + cli = ["--forwarded-allow-ips"] + meta = "STRING" + validator = validate_string_to_list + default = os.environ.get("FORWARDED_ALLOW_IPS", "127.0.0.1") + desc = """\ + Front-end's IPs from which allowed to handle set secure headers. + (comma separate). + + Set to ``*`` to disable checking of Front-end IPs (useful for setups + where you don't know in advance the IP address of Front-end, but + you still trust the environment). + + By default, the value of the ``FORWARDED_ALLOW_IPS`` environment + variable. If it is not defined, the default is ``"127.0.0.1"``. + """ + + +class AccessLog(Setting): + name = "accesslog" + section = "Logging" + cli = ["--access-logfile"] + meta = "FILE" + validator = validate_string + default = None + desc = """\ + The Access log file to write to. + + ``'-'`` means log to stdout. + """ + +class DisableRedirectAccessToSyslog(Setting): + name = "disable_redirect_access_to_syslog" + section = "Logging" + cli = ["--disable-redirect-access-to-syslog"] + validator = validate_bool + action = 'store_true' + default = False + desc = """\ + Disable redirect access logs to syslog. + + .. versionadded:: 19.8 + """ + + +class AccessLogFormat(Setting): + name = "access_log_format" + section = "Logging" + cli = ["--access-logformat"] + meta = "STRING" + validator = validate_string + default = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"' + desc = """\ + The access log format. + + =========== =========== + Identifier Description + =========== =========== + h remote address + l ``'-'`` + u user name + t date of the request + r status line (e.g. ``GET / HTTP/1.1``) + m request method + U URL path without query string + q query string + H protocol + s status + B response length + b response length or ``'-'`` (CLF format) + f referer + a user agent + T request time in seconds + D request time in microseconds + L request time in decimal seconds + p process ID + {Header}i request header + {Header}o response header + {Variable}e environment variable + =========== =========== + """ + + +class ErrorLog(Setting): + name = "errorlog" + section = "Logging" + cli = ["--error-logfile", "--log-file"] + meta = "FILE" + validator = validate_string + default = '-' + desc = """\ + The Error log file to write to. + + Using ``'-'`` for FILE makes gunicorn log to stderr. + + .. versionchanged:: 19.2 + Log to stderr by default. + + """ + + +class Loglevel(Setting): + name = "loglevel" + section = "Logging" + cli = ["--log-level"] + meta = "LEVEL" + validator = validate_string + default = "info" + desc = """\ + The granularity of Error log outputs. + + Valid level names are: + + * debug + * info + * warning + * error + * critical + """ + + +class CaptureOutput(Setting): + name = "capture_output" + section = "Logging" + cli = ["--capture-output"] + validator = validate_bool + action = 'store_true' + default = False + desc = """\ + Redirect stdout/stderr to specified file in :ref:`errorlog`. + + .. 
versionadded:: 19.6 + """ + + +class LoggerClass(Setting): + name = "logger_class" + section = "Logging" + cli = ["--logger-class"] + meta = "STRING" + validator = validate_class + default = "gunicorn.glogging.Logger" + desc = """\ + The logger you want to use to log events in Gunicorn. + + The default class (``gunicorn.glogging.Logger``) handle most of + normal usages in logging. It provides error and access logging. + + You can provide your own logger by giving Gunicorn a + Python path to a subclass like ``gunicorn.glogging.Logger``. + """ + + +class LogConfig(Setting): + name = "logconfig" + section = "Logging" + cli = ["--log-config"] + meta = "FILE" + validator = validate_string + default = None + desc = """\ + The log config file to use. + Gunicorn uses the standard Python logging module's Configuration + file format. + """ + + +class LogConfigDict(Setting): + name = "logconfig_dict" + section = "Logging" + cli = ["--log-config-dict"] + validator = validate_dict + default = {} + desc = """\ + The log config dictionary to use, using the standard Python + logging module's dictionary configuration format. This option + takes precedence over the :ref:`logconfig` option, which uses the + older file configuration format. + + Format: https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig + + .. versionadded:: 19.8 + """ + + +class SyslogTo(Setting): + name = "syslog_addr" + section = "Logging" + cli = ["--log-syslog-to"] + meta = "SYSLOG_ADDR" + validator = validate_string + + if PLATFORM == "darwin": + default = "unix:///var/run/syslog" + elif PLATFORM in ('freebsd', 'dragonfly', ): + default = "unix:///var/run/log" + elif PLATFORM == "openbsd": + default = "unix:///dev/log" + else: + default = "udp://localhost:514" + + desc = """\ + Address to send syslog messages. + + Address is a string of the form: + + * ``unix://PATH#TYPE`` : for unix domain socket. ``TYPE`` can be ``stream`` + for the stream driver or ``dgram`` for the dgram driver. + ``stream`` is the default. + * ``udp://HOST:PORT`` : for UDP sockets + * ``tcp://HOST:PORT`` : for TCP sockets + + """ + + +class Syslog(Setting): + name = "syslog" + section = "Logging" + cli = ["--log-syslog"] + validator = validate_bool + action = 'store_true' + default = False + desc = """\ + Send *Gunicorn* logs to syslog. + + .. versionchanged:: 19.8 + You can now disable sending access logs by using the + :ref:`disable-redirect-access-to-syslog` setting. + """ + + +class SyslogPrefix(Setting): + name = "syslog_prefix" + section = "Logging" + cli = ["--log-syslog-prefix"] + meta = "SYSLOG_PREFIX" + validator = validate_string + default = None + desc = """\ + Makes Gunicorn use the parameter as program-name in the syslog entries. + + All entries will be prefixed by ``gunicorn.``. By default the + program name is the name of the process. + """ + + +class SyslogFacility(Setting): + name = "syslog_facility" + section = "Logging" + cli = ["--log-syslog-facility"] + meta = "SYSLOG_FACILITY" + validator = validate_string + default = "user" + desc = """\ + Syslog facility name + """ + + +class EnableStdioInheritance(Setting): + name = "enable_stdio_inheritance" + section = "Logging" + cli = ["-R", "--enable-stdio-inheritance"] + validator = validate_bool + default = False + action = "store_true" + desc = """\ + Enable stdio inheritance. + + Enable inheritance for stdio file descriptors in daemon mode. + + Note: To disable the Python stdout buffering, you can to set the user + environment variable ``PYTHONUNBUFFERED`` . 
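The logging settings above follow the same pattern; the sketch below sends the access log to stdout and builds a custom access_log_format from the identifier table earlier in this block (values are illustrative):

# gunicorn.conf.py -- illustrative logging setup
accesslog = "-"            # '-' writes the access log to stdout
errorlog = "-"             # '-' writes the error log to stderr (the default)
loglevel = "info"          # one of debug/info/warning/error/critical
capture_output = False     # True would redirect the app's stdout/stderr into errorlog
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s %(L)ss'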
+ """ + + +# statsD monitoring +class StatsdHost(Setting): + name = "statsd_host" + section = "Logging" + cli = ["--statsd-host"] + meta = "STATSD_ADDR" + default = None + validator = validate_hostport + desc = """\ + ``host:port`` of the statsd server to log to. + + .. versionadded:: 19.1 + """ + +class StatsdPrefix(Setting): + name = "statsd_prefix" + section = "Logging" + cli = ["--statsd-prefix"] + meta = "STATSD_PREFIX" + default = "" + validator = validate_string + desc = """\ + Prefix to use when emitting statsd metrics (a trailing ``.`` is added, + if not provided). + + .. versionadded:: 19.2 + """ + + +class Procname(Setting): + name = "proc_name" + section = "Process Naming" + cli = ["-n", "--name"] + meta = "STRING" + validator = validate_string + default = None + desc = """\ + A base to use with setproctitle for process naming. + + This affects things like ``ps`` and ``top``. If you're going to be + running more than one instance of Gunicorn you'll probably want to set a + name to tell them apart. This requires that you install the setproctitle + module. + + If not set, the *default_proc_name* setting will be used. + """ + + +class DefaultProcName(Setting): + name = "default_proc_name" + section = "Process Naming" + validator = validate_string + default = "gunicorn" + desc = """\ + Internal setting that is adjusted for each type of application. + """ + + +class PythonPath(Setting): + name = "pythonpath" + section = "Server Mechanics" + cli = ["--pythonpath"] + meta = "STRING" + validator = validate_string + default = None + desc = """\ + A comma-separated list of directories to add to the Python path. + + e.g. + ``'/home/djangoprojects/myproject,/home/python/mylibrary'``. + """ + + +class Paste(Setting): + name = "paste" + section = "Server Mechanics" + cli = ["--paste", "--paster"] + meta = "STRING" + validator = validate_string + default = None + desc = """\ + Load a PasteDeploy config file. The argument may contain a ``#`` + symbol followed by the name of an app section from the config file, + e.g. ``production.ini#admin``. + + At this time, using alternate server blocks is not supported. Use the + command line arguments to control server configuration instead. + """ + + +class OnStarting(Setting): + name = "on_starting" + section = "Server Hooks" + validator = validate_callable(1) + type = six.callable + + def on_starting(server): + pass + default = staticmethod(on_starting) + desc = """\ + Called just before the master process is initialized. + + The callable needs to accept a single instance variable for the Arbiter. + """ + + +class OnReload(Setting): + name = "on_reload" + section = "Server Hooks" + validator = validate_callable(1) + type = six.callable + + def on_reload(server): + pass + default = staticmethod(on_reload) + desc = """\ + Called to recycle workers during a reload via SIGHUP. + + The callable needs to accept a single instance variable for the Arbiter. + """ + + +class WhenReady(Setting): + name = "when_ready" + section = "Server Hooks" + validator = validate_callable(1) + type = six.callable + + def when_ready(server): + pass + default = staticmethod(when_ready) + desc = """\ + Called just after the server is started. + + The callable needs to accept a single instance variable for the Arbiter. 
+ """ + + +class Prefork(Setting): + name = "pre_fork" + section = "Server Hooks" + validator = validate_callable(2) + type = six.callable + + def pre_fork(server, worker): + pass + default = staticmethod(pre_fork) + desc = """\ + Called just before a worker is forked. + + The callable needs to accept two instance variables for the Arbiter and + new Worker. + """ + + +class Postfork(Setting): + name = "post_fork" + section = "Server Hooks" + validator = validate_callable(2) + type = six.callable + + def post_fork(server, worker): + pass + default = staticmethod(post_fork) + desc = """\ + Called just after a worker has been forked. + + The callable needs to accept two instance variables for the Arbiter and + new Worker. + """ + + +class PostWorkerInit(Setting): + name = "post_worker_init" + section = "Server Hooks" + validator = validate_callable(1) + type = six.callable + + def post_worker_init(worker): + pass + + default = staticmethod(post_worker_init) + desc = """\ + Called just after a worker has initialized the application. + + The callable needs to accept one instance variable for the initialized + Worker. + """ + +class WorkerInt(Setting): + name = "worker_int" + section = "Server Hooks" + validator = validate_callable(1) + type = six.callable + + def worker_int(worker): + pass + + default = staticmethod(worker_int) + desc = """\ + Called just after a worker exited on SIGINT or SIGQUIT. + + The callable needs to accept one instance variable for the initialized + Worker. + """ + + +class WorkerAbort(Setting): + name = "worker_abort" + section = "Server Hooks" + validator = validate_callable(1) + type = six.callable + + def worker_abort(worker): + pass + + default = staticmethod(worker_abort) + desc = """\ + Called when a worker received the SIGABRT signal. + + This call generally happens on timeout. + + The callable needs to accept one instance variable for the initialized + Worker. + """ + + +class PreExec(Setting): + name = "pre_exec" + section = "Server Hooks" + validator = validate_callable(1) + type = six.callable + + def pre_exec(server): + pass + default = staticmethod(pre_exec) + desc = """\ + Called just before a new master process is forked. + + The callable needs to accept a single instance variable for the Arbiter. + """ + + +class PreRequest(Setting): + name = "pre_request" + section = "Server Hooks" + validator = validate_callable(2) + type = six.callable + + def pre_request(worker, req): + worker.log.debug("%s %s" % (req.method, req.path)) + default = staticmethod(pre_request) + desc = """\ + Called just before a worker processes the request. + + The callable needs to accept two instance variables for the Worker and + the Request. + """ + + +class PostRequest(Setting): + name = "post_request" + section = "Server Hooks" + validator = validate_post_request + type = six.callable + + def post_request(worker, req, environ, resp): + pass + default = staticmethod(post_request) + desc = """\ + Called after a worker processes the request. + + The callable needs to accept two instance variables for the Worker and + the Request. + """ + + +class ChildExit(Setting): + name = "child_exit" + section = "Server Hooks" + validator = validate_callable(2) + type = six.callable + + def child_exit(server, worker): + pass + default = staticmethod(child_exit) + desc = """\ + Called just after a worker has been exited, in the master process. + + The callable needs to accept two instance variables for the Arbiter and + the just-exited Worker. + + .. 
versionadded:: 19.7 + """ + + +class WorkerExit(Setting): + name = "worker_exit" + section = "Server Hooks" + validator = validate_callable(2) + type = six.callable + + def worker_exit(server, worker): + pass + default = staticmethod(worker_exit) + desc = """\ + Called just after a worker has been exited, in the worker process. + + The callable needs to accept two instance variables for the Arbiter and + the just-exited Worker. + """ + + +class NumWorkersChanged(Setting): + name = "nworkers_changed" + section = "Server Hooks" + validator = validate_callable(3) + type = six.callable + + def nworkers_changed(server, new_value, old_value): + pass + default = staticmethod(nworkers_changed) + desc = """\ + Called just after *num_workers* has been changed. + + The callable needs to accept an instance variable of the Arbiter and + two integers of number of workers after and before change. + + If the number of workers is set for the first time, *old_value* would + be ``None``. + """ + +class OnExit(Setting): + name = "on_exit" + section = "Server Hooks" + validator = validate_callable(1) + + def on_exit(server): + pass + + default = staticmethod(on_exit) + desc = """\ + Called just before exiting Gunicorn. + + The callable needs to accept a single instance variable for the Arbiter. + """ + + +class ProxyProtocol(Setting): + name = "proxy_protocol" + section = "Server Mechanics" + cli = ["--proxy-protocol"] + validator = validate_bool + default = False + action = "store_true" + desc = """\ + Enable detect PROXY protocol (PROXY mode). + + Allow using HTTP and Proxy together. It may be useful for work with + stunnel as HTTPS frontend and Gunicorn as HTTP server. + + PROXY protocol: http://haproxy.1wt.eu/download/1.5/doc/proxy-protocol.txt + + Example for stunnel config:: + + [https] + protocol = proxy + accept = 443 + connect = 80 + cert = /etc/ssl/certs/stunnel.pem + key = /etc/ssl/certs/stunnel.key + """ + + +class ProxyAllowFrom(Setting): + name = "proxy_allow_ips" + section = "Server Mechanics" + cli = ["--proxy-allow-from"] + validator = validate_string_to_list + default = "127.0.0.1" + desc = """\ + Front-end's IPs from which allowed accept proxy requests (comma separate). + + Set to ``*`` to disable checking of Front-end IPs (useful for setups + where you don't know in advance the IP address of Front-end, but + you still trust the environment) + """ + + +class KeyFile(Setting): + name = "keyfile" + section = "SSL" + cli = ["--keyfile"] + meta = "FILE" + validator = validate_string + default = None + desc = """\ + SSL key file + """ + + +class CertFile(Setting): + name = "certfile" + section = "SSL" + cli = ["--certfile"] + meta = "FILE" + validator = validate_string + default = None + desc = """\ + SSL certificate file + """ + +class SSLVersion(Setting): + name = "ssl_version" + section = "SSL" + cli = ["--ssl-version"] + validator = validate_pos_int + default = ssl.PROTOCOL_SSLv23 + desc = """\ + SSL version to use (see stdlib ssl module's) + + .. versionchanged:: 19.7 + The default value has been changed from ``ssl.PROTOCOL_TLSv1`` to + ``ssl.PROTOCOL_SSLv23``. 
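The per-worker and per-request hooks in this block follow the same convention, each taking the instances listed in its description, and SSL is switched on simply by pointing certfile/keyfile at files. A sketch with hypothetical paths and illustrative log lines:

# gunicorn.conf.py -- worker/request hooks and SSL
def post_fork(server, worker):
    server.log.info("worker spawned (pid: %s)", worker.pid)

def worker_int(worker):
    worker.log.info("worker got SIGINT or SIGQUIT")

def pre_request(worker, req):
    worker.log.debug("%s %s", req.method, req.path)

def post_request(worker, req, environ, resp):
    worker.log.debug("%s %s -> %s", req.method, req.path, resp.status)

certfile = "/etc/ssl/certs/myapp.pem"    # hypothetical; setting certfile or keyfile enables SSL
keyfile = "/etc/ssl/private/myapp.key"   # hypothetical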
+ """ + +class CertReqs(Setting): + name = "cert_reqs" + section = "SSL" + cli = ["--cert-reqs"] + validator = validate_pos_int + default = ssl.CERT_NONE + desc = """\ + Whether client certificate is required (see stdlib ssl module's) + """ + +class CACerts(Setting): + name = "ca_certs" + section = "SSL" + cli = ["--ca-certs"] + meta = "FILE" + validator = validate_string + default = None + desc = """\ + CA certificates file + """ + +class SuppressRaggedEOFs(Setting): + name = "suppress_ragged_eofs" + section = "SSL" + cli = ["--suppress-ragged-eofs"] + action = "store_true" + default = True + validator = validate_bool + desc = """\ + Suppress ragged EOFs (see stdlib ssl module's) + """ + +class DoHandshakeOnConnect(Setting): + name = "do_handshake_on_connect" + section = "SSL" + cli = ["--do-handshake-on-connect"] + validator = validate_bool + action = "store_true" + default = False + desc = """\ + Whether to perform SSL handshake on socket connect (see stdlib ssl module's) + """ + + +if sys.version_info >= (2, 7): + class Ciphers(Setting): + name = "ciphers" + section = "SSL" + cli = ["--ciphers"] + validator = validate_string + default = 'TLSv1' + desc = """\ + Ciphers to use (see stdlib ssl module's) + """ + + +class PasteGlobalConf(Setting): + name = "raw_paste_global_conf" + action = "append" + section = "Server Mechanics" + cli = ["--paste-global"] + meta = "CONF" + validator = validate_list_string + default = [] + + desc = """\ + Set a PasteDeploy global config variable in ``key=value`` form. + + The option can be specified multiple times. + + The variables are passed to the the PasteDeploy entrypoint. Example:: + + $ gunicorn -b 127.0.0.1:8000 --paste development.ini --paste-global FOO=1 --paste-global BAR=2 + + .. versionadded:: 19.7 + """ diff --git a/py3/lib/python3.6/site-packages/gunicorn/debug.py b/py3/lib/python3.6/site-packages/gunicorn/debug.py new file mode 100644 index 0000000..996fe1b --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/debug.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +"""The debug module contains utilities and functions for better +debugging Gunicorn.""" + +import sys +import linecache +import re +import inspect + +__all__ = ['spew', 'unspew'] + +_token_spliter = re.compile(r'\W+') + + +class Spew(object): + + def __init__(self, trace_names=None, show_values=True): + self.trace_names = trace_names + self.show_values = show_values + + def __call__(self, frame, event, arg): + if event == 'line': + lineno = frame.f_lineno + if '__file__' in frame.f_globals: + filename = frame.f_globals['__file__'] + if (filename.endswith('.pyc') or + filename.endswith('.pyo')): + filename = filename[:-1] + name = frame.f_globals['__name__'] + line = linecache.getline(filename, lineno) + else: + name = '[unknown]' + try: + src = inspect.getsourcelines(frame) + line = src[lineno] + except IOError: + line = 'Unknown code named [%s]. 
VM instruction #%d' % ( + frame.f_code.co_name, frame.f_lasti) + if self.trace_names is None or name in self.trace_names: + print('%s:%s: %s' % (name, lineno, line.rstrip())) + if not self.show_values: + return self + details = [] + tokens = _token_spliter.split(line) + for tok in tokens: + if tok in frame.f_globals: + details.append('%s=%r' % (tok, frame.f_globals[tok])) + if tok in frame.f_locals: + details.append('%s=%r' % (tok, frame.f_locals[tok])) + if details: + print("\t%s" % ' '.join(details)) + return self + + +def spew(trace_names=None, show_values=False): + """Install a trace hook which writes incredibly detailed logs + about what code is being executed to stdout. + """ + sys.settrace(Spew(trace_names, show_values)) + + +def unspew(): + """Remove the trace hook installed by spew. + """ + sys.settrace(None) diff --git a/py3/lib/python3.6/site-packages/gunicorn/errors.py b/py3/lib/python3.6/site-packages/gunicorn/errors.py new file mode 100644 index 0000000..727d336 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/errors.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +# We don't need to call super() in __init__ methods of our +# BaseException and Exception classes because we also define +# our own __str__ methods so there is no need to pass 'message' +# to the base class to get a meaningful output from 'str(exc)'. +# pylint: disable=super-init-not-called + + +# we inherit from BaseException here to make sure to not be caught +# at application level +class HaltServer(BaseException): + def __init__(self, reason, exit_status=1): + self.reason = reason + self.exit_status = exit_status + + def __str__(self): + return "" % (self.reason, self.exit_status) + + +class ConfigError(Exception): + """ Exception raised on config error """ + + +class AppImportError(Exception): + """ Exception raised when loading an application """ diff --git a/py3/lib/python3.6/site-packages/gunicorn/glogging.py b/py3/lib/python3.6/site-packages/gunicorn/glogging.py new file mode 100644 index 0000000..041a74d --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/glogging.py @@ -0,0 +1,478 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
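The spew()/unspew() pair from gunicorn/debug.py above installs and removes a sys.settrace hook that prints every executed line. A short, self-contained usage sketch, restricting tracing to the running script via its __main__ module name:

from gunicorn import debug

def work():
    total = 0
    for i in range(3):
        total += i
    return total

debug.spew(trace_names=["__main__"], show_values=True)  # trace only frames from this module
try:
    work()
finally:
    debug.unspew()  # always remove the trace hook again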
+ +import base64 +import binascii +import time +import logging +logging.Logger.manager.emittedNoHandlerWarning = 1 +from logging.config import fileConfig +try: + from logging.config import dictConfig +except ImportError: + # python 2.6 + dictConfig = None +import os +import socket +import sys +import threading +import traceback + +from gunicorn import util +from gunicorn.six import PY3, string_types + + +# syslog facility codes +SYSLOG_FACILITIES = { + "auth": 4, + "authpriv": 10, + "cron": 9, + "daemon": 3, + "ftp": 11, + "kern": 0, + "lpr": 6, + "mail": 2, + "news": 7, + "security": 4, # DEPRECATED + "syslog": 5, + "user": 1, + "uucp": 8, + "local0": 16, + "local1": 17, + "local2": 18, + "local3": 19, + "local4": 20, + "local5": 21, + "local6": 22, + "local7": 23 + } + + +CONFIG_DEFAULTS = dict( + version=1, + disable_existing_loggers=False, + + loggers={ + "root": {"level": "INFO", "handlers": ["console"]}, + "gunicorn.error": { + "level": "INFO", + "handlers": ["error_console"], + "propagate": True, + "qualname": "gunicorn.error" + }, + + "gunicorn.access": { + "level": "INFO", + "handlers": ["console"], + "propagate": True, + "qualname": "gunicorn.access" + } + }, + handlers={ + "console": { + "class": "logging.StreamHandler", + "formatter": "generic", + "stream": "ext://sys.stdout" + }, + "error_console": { + "class": "logging.StreamHandler", + "formatter": "generic", + "stream": "ext://sys.stderr" + }, + }, + formatters={ + "generic": { + "format": "%(asctime)s [%(process)d] [%(levelname)s] %(message)s", + "datefmt": "[%Y-%m-%d %H:%M:%S %z]", + "class": "logging.Formatter" + } + } +) + + +def loggers(): + """ get list of all loggers """ + root = logging.root + existing = root.manager.loggerDict.keys() + return [logging.getLogger(name) for name in existing] + + +class SafeAtoms(dict): + + def __init__(self, atoms): + dict.__init__(self) + for key, value in atoms.items(): + if isinstance(value, string_types): + self[key] = value.replace('"', '\\"') + else: + self[key] = value + + def __getitem__(self, k): + if k.startswith("{"): + kl = k.lower() + if kl in self: + return super(SafeAtoms, self).__getitem__(kl) + else: + return "-" + if k in self: + return super(SafeAtoms, self).__getitem__(k) + else: + return '-' + + +def parse_syslog_address(addr): + + # unix domain socket type depends on backend + # SysLogHandler will try both when given None + if addr.startswith("unix://"): + sock_type = None + + # set socket type only if explicitly requested + parts = addr.split("#", 1) + if len(parts) == 2: + addr = parts[0] + if parts[1] == "dgram": + sock_type = socket.SOCK_DGRAM + + return (sock_type, addr.split("unix://")[1]) + + if addr.startswith("udp://"): + addr = addr.split("udp://")[1] + socktype = socket.SOCK_DGRAM + elif addr.startswith("tcp://"): + addr = addr.split("tcp://")[1] + socktype = socket.SOCK_STREAM + else: + raise RuntimeError("invalid syslog address") + + if '[' in addr and ']' in addr: + host = addr.split(']')[0][1:].lower() + elif ':' in addr: + host = addr.split(':')[0].lower() + elif addr == "": + host = "localhost" + else: + host = addr.lower() + + addr = addr.split(']')[-1] + if ":" in addr: + port = addr.split(':', 1)[1] + if not port.isdigit(): + raise RuntimeError("%r is not a valid port number." 
% port) + port = int(port) + else: + port = 514 + + return (socktype, (host, port)) + + +class Logger(object): + + LOG_LEVELS = { + "critical": logging.CRITICAL, + "error": logging.ERROR, + "warning": logging.WARNING, + "info": logging.INFO, + "debug": logging.DEBUG + } + loglevel = logging.INFO + + error_fmt = r"%(asctime)s [%(process)d] [%(levelname)s] %(message)s" + datefmt = r"[%Y-%m-%d %H:%M:%S %z]" + + access_fmt = "%(message)s" + syslog_fmt = "[%(process)d] %(message)s" + + atoms_wrapper_class = SafeAtoms + + def __init__(self, cfg): + self.error_log = logging.getLogger("gunicorn.error") + self.error_log.propagate = False + self.access_log = logging.getLogger("gunicorn.access") + self.access_log.propagate = False + self.error_handlers = [] + self.access_handlers = [] + self.logfile = None + self.lock = threading.Lock() + self.cfg = cfg + self.setup(cfg) + + def setup(self, cfg): + self.loglevel = self.LOG_LEVELS.get(cfg.loglevel.lower(), logging.INFO) + self.error_log.setLevel(self.loglevel) + self.access_log.setLevel(logging.INFO) + + # set gunicorn.error handler + if self.cfg.capture_output and cfg.errorlog != "-": + for stream in sys.stdout, sys.stderr: + stream.flush() + + self.logfile = open(cfg.errorlog, 'a+') + os.dup2(self.logfile.fileno(), sys.stdout.fileno()) + os.dup2(self.logfile.fileno(), sys.stderr.fileno()) + + self._set_handler(self.error_log, cfg.errorlog, + logging.Formatter(self.error_fmt, self.datefmt)) + + # set gunicorn.access handler + if cfg.accesslog is not None: + self._set_handler(self.access_log, cfg.accesslog, + fmt=logging.Formatter(self.access_fmt), stream=sys.stdout) + + # set syslog handler + if cfg.syslog: + self._set_syslog_handler( + self.error_log, cfg, self.syslog_fmt, "error" + ) + if not cfg.disable_redirect_access_to_syslog: + self._set_syslog_handler( + self.access_log, cfg, self.syslog_fmt, "access" + ) + + if dictConfig is None and cfg.logconfig_dict: + util.warn("Dictionary-based log configuration requires " + "Python 2.7 or above.") + + if dictConfig and cfg.logconfig_dict: + config = CONFIG_DEFAULTS.copy() + config.update(cfg.logconfig_dict) + try: + dictConfig(config) + except ( + AttributeError, + ImportError, + ValueError, + TypeError + ) as exc: + raise RuntimeError(str(exc)) + elif cfg.logconfig: + if os.path.exists(cfg.logconfig): + defaults = CONFIG_DEFAULTS.copy() + defaults['__file__'] = cfg.logconfig + defaults['here'] = os.path.dirname(cfg.logconfig) + fileConfig(cfg.logconfig, defaults=defaults, + disable_existing_loggers=False) + else: + msg = "Error: log config '%s' not found" + raise RuntimeError(msg % cfg.logconfig) + + def critical(self, msg, *args, **kwargs): + self.error_log.critical(msg, *args, **kwargs) + + def error(self, msg, *args, **kwargs): + self.error_log.error(msg, *args, **kwargs) + + def warning(self, msg, *args, **kwargs): + self.error_log.warning(msg, *args, **kwargs) + + def info(self, msg, *args, **kwargs): + self.error_log.info(msg, *args, **kwargs) + + def debug(self, msg, *args, **kwargs): + self.error_log.debug(msg, *args, **kwargs) + + def exception(self, msg, *args, **kwargs): + self.error_log.exception(msg, *args, **kwargs) + + def log(self, lvl, msg, *args, **kwargs): + if isinstance(lvl, string_types): + lvl = self.LOG_LEVELS.get(lvl.lower(), logging.INFO) + self.error_log.log(lvl, msg, *args, **kwargs) + + def atoms(self, resp, req, environ, request_time): + """ Gets atoms for log formating. 
+ """ + status = resp.status + if isinstance(status, str): + status = status.split(None, 1)[0] + atoms = { + 'h': environ.get('REMOTE_ADDR', '-'), + 'l': '-', + 'u': self._get_user(environ) or '-', + 't': self.now(), + 'r': "%s %s %s" % (environ['REQUEST_METHOD'], + environ['RAW_URI'], environ["SERVER_PROTOCOL"]), + 's': status, + 'm': environ.get('REQUEST_METHOD'), + 'U': environ.get('PATH_INFO'), + 'q': environ.get('QUERY_STRING'), + 'H': environ.get('SERVER_PROTOCOL'), + 'b': getattr(resp, 'sent', None) is not None and str(resp.sent) or '-', + 'B': getattr(resp, 'sent', None), + 'f': environ.get('HTTP_REFERER', '-'), + 'a': environ.get('HTTP_USER_AGENT', '-'), + 'T': request_time.seconds, + 'D': (request_time.seconds*1000000) + request_time.microseconds, + 'L': "%d.%06d" % (request_time.seconds, request_time.microseconds), + 'p': "<%s>" % os.getpid() + } + + # add request headers + if hasattr(req, 'headers'): + req_headers = req.headers + else: + req_headers = req + + if hasattr(req_headers, "items"): + req_headers = req_headers.items() + + atoms.update(dict([("{%s}i" % k.lower(), v) for k, v in req_headers])) + + resp_headers = resp.headers + if hasattr(resp_headers, "items"): + resp_headers = resp_headers.items() + + # add response headers + atoms.update(dict([("{%s}o" % k.lower(), v) for k, v in resp_headers])) + + # add environ variables + environ_variables = environ.items() + atoms.update(dict([("{%s}e" % k.lower(), v) for k, v in environ_variables])) + + return atoms + + def access(self, resp, req, environ, request_time): + """ See http://httpd.apache.org/docs/2.0/logs.html#combined + for format details + """ + + if not (self.cfg.accesslog or self.cfg.logconfig or + self.cfg.logconfig_dict or + (self.cfg.syslog and not self.cfg.disable_redirect_access_to_syslog)): + return + + # wrap atoms: + # - make sure atoms will be test case insensitively + # - if atom doesn't exist replace it by '-' + safe_atoms = self.atoms_wrapper_class(self.atoms(resp, req, environ, + request_time)) + + try: + self.access_log.info(self.cfg.access_log_format, safe_atoms) + except: + self.error(traceback.format_exc()) + + def now(self): + """ return date in Apache Common Log Format """ + return time.strftime('[%d/%b/%Y:%H:%M:%S %z]') + + def reopen_files(self): + if self.cfg.capture_output and self.cfg.errorlog != "-": + for stream in sys.stdout, sys.stderr: + stream.flush() + + with self.lock: + if self.logfile is not None: + self.logfile.close() + self.logfile = open(self.cfg.errorlog, 'a+') + os.dup2(self.logfile.fileno(), sys.stdout.fileno()) + os.dup2(self.logfile.fileno(), sys.stderr.fileno()) + + + for log in loggers(): + for handler in log.handlers: + if isinstance(handler, logging.FileHandler): + handler.acquire() + try: + if handler.stream: + handler.close() + handler.stream = handler._open() + finally: + handler.release() + + def close_on_exec(self): + for log in loggers(): + for handler in log.handlers: + if isinstance(handler, logging.FileHandler): + handler.acquire() + try: + if handler.stream: + util.close_on_exec(handler.stream.fileno()) + finally: + handler.release() + + def _get_gunicorn_handler(self, log): + for h in log.handlers: + if getattr(h, "_gunicorn", False): + return h + + def _set_handler(self, log, output, fmt, stream=None): + # remove previous gunicorn log handler + h = self._get_gunicorn_handler(log) + if h: + log.handlers.remove(h) + + if output is not None: + if output == "-": + h = logging.StreamHandler(stream) + else: + util.check_is_writeable(output) + h = 
logging.FileHandler(output) + # make sure the user can reopen the file + try: + os.chown(h.baseFilename, self.cfg.user, self.cfg.group) + except OSError: + # it's probably OK there, we assume the user has given + # /dev/null as a parameter. + pass + + h.setFormatter(fmt) + h._gunicorn = True + log.addHandler(h) + + def _set_syslog_handler(self, log, cfg, fmt, name): + # setup format + if not cfg.syslog_prefix: + prefix = cfg.proc_name.replace(":", ".") + else: + prefix = cfg.syslog_prefix + + prefix = "gunicorn.%s.%s" % (prefix, name) + + # set format + fmt = logging.Formatter(r"%s: %s" % (prefix, fmt)) + + # syslog facility + try: + facility = SYSLOG_FACILITIES[cfg.syslog_facility.lower()] + except KeyError: + raise RuntimeError("unknown facility name") + + # parse syslog address + socktype, addr = parse_syslog_address(cfg.syslog_addr) + + # finally setup the syslog handler + if sys.version_info >= (2, 7): + h = logging.handlers.SysLogHandler(address=addr, + facility=facility, socktype=socktype) + else: + # socktype is only supported in 2.7 and sup + # fix issue #541 + h = logging.handlers.SysLogHandler(address=addr, + facility=facility) + + h.setFormatter(fmt) + h._gunicorn = True + log.addHandler(h) + + def _get_user(self, environ): + user = None + http_auth = environ.get("HTTP_AUTHORIZATION") + if http_auth and http_auth.startswith('Basic'): + auth = http_auth.split(" ", 1) + if len(auth) == 2: + try: + # b64decode doesn't accept unicode in Python < 3.3 + # so we need to convert it to a byte string + auth = base64.b64decode(auth[1].strip().encode('utf-8')) + if PY3: # b64decode returns a byte string in Python 3 + auth = auth.decode('utf-8') + auth = auth.split(":", 1) + except (TypeError, binascii.Error, UnicodeDecodeError) as exc: + self.debug("Couldn't get username: %s", exc) + return user + if len(auth) == 2: + user = auth[0] + return user diff --git a/py3/lib/python3.6/site-packages/gunicorn/http/__init__.py b/py3/lib/python3.6/site-packages/gunicorn/http/__init__.py new file mode 100644 index 0000000..1da6f3e --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/http/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +from gunicorn.http.message import Message, Request +from gunicorn.http.parser import RequestParser + +__all__ = ['Message', 'Request', 'RequestParser'] diff --git a/py3/lib/python3.6/site-packages/gunicorn/http/_sendfile.py b/py3/lib/python3.6/site-packages/gunicorn/http/_sendfile.py new file mode 100644 index 0000000..1764cb3 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/http/_sendfile.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
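_get_user above recovers the username that feeds the %(u)s access-log atom from an HTTP Basic Authorization header. A standalone sketch of the same decoding steps (the sample header is made up):

import base64

def basic_auth_user(environ):
    # mirrors gunicorn.glogging.Logger._get_user: return the username or None
    http_auth = environ.get("HTTP_AUTHORIZATION")
    if not (http_auth and http_auth.startswith("Basic")):
        return None
    parts = http_auth.split(" ", 1)
    if len(parts) != 2:
        return None
    try:
        decoded = base64.b64decode(parts[1].strip().encode("utf-8")).decode("utf-8")
    except (TypeError, ValueError):
        return None
    return decoded.split(":", 1)[0]

# 'Basic YWxpY2U6c2VjcmV0' encodes 'alice:secret', so this prints 'alice'
print(basic_auth_user({"HTTP_AUTHORIZATION": "Basic YWxpY2U6c2VjcmV0"}))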
+ +import errno +import os +import sys + +try: + import ctypes + import ctypes.util +except MemoryError: + # selinux execmem denial + # https://bugzilla.redhat.com/show_bug.cgi?id=488396 + raise ImportError + +SUPPORTED_PLATFORMS = ( + 'darwin', + 'freebsd', + 'dragonfly', + 'linux2') + +if sys.platform not in SUPPORTED_PLATFORMS: + raise ImportError("sendfile isn't supported on this platform") + +_libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True) +_sendfile = _libc.sendfile + + +def sendfile(fdout, fdin, offset, nbytes): + if sys.platform == 'darwin': + _sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_uint64, + ctypes.POINTER(ctypes.c_uint64), ctypes.c_voidp, + ctypes.c_int] + _nbytes = ctypes.c_uint64(nbytes) + result = _sendfile(fdin, fdout, offset, _nbytes, None, 0) + + if result == -1: + e = ctypes.get_errno() + if e == errno.EAGAIN and _nbytes.value is not None: + return _nbytes.value + raise OSError(e, os.strerror(e)) + return _nbytes.value + elif sys.platform in ('freebsd', 'dragonfly',): + _sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_uint64, + ctypes.c_uint64, ctypes.c_voidp, + ctypes.POINTER(ctypes.c_uint64), ctypes.c_int] + _sbytes = ctypes.c_uint64() + result = _sendfile(fdin, fdout, offset, nbytes, None, _sbytes, 0) + if result == -1: + e = ctypes.get_errno() + if e == errno.EAGAIN and _sbytes.value is not None: + return _sbytes.value + raise OSError(e, os.strerror(e)) + return _sbytes.value + + else: + _sendfile.argtypes = [ctypes.c_int, ctypes.c_int, + ctypes.POINTER(ctypes.c_uint64), ctypes.c_size_t] + + _offset = ctypes.c_uint64(offset) + sent = _sendfile(fdout, fdin, _offset, nbytes) + if sent == -1: + e = ctypes.get_errno() + raise OSError(e, os.strerror(e)) + return sent diff --git a/py3/lib/python3.6/site-packages/gunicorn/http/body.py b/py3/lib/python3.6/site-packages/gunicorn/http/body.py new file mode 100644 index 0000000..fb8633e --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/http/body.py @@ -0,0 +1,259 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
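The ctypes shim in _sendfile.py above exists mainly for older Pythons; on Python 3 the standard library exposes the same kernel-side copy directly through os.sendfile, which this sketch uses instead (file names are hypothetical and Linux semantics are assumed):

import os

src_fd = os.open("payload.bin", os.O_RDONLY)                   # hypothetical input file
dst_fd = os.open("copy.bin", os.O_WRONLY | os.O_CREAT, 0o644)  # hypothetical output file
try:
    offset = 0
    remaining = os.fstat(src_fd).st_size
    while remaining > 0:
        # os.sendfile(out_fd, in_fd, offset, count) returns the number of bytes copied
        sent = os.sendfile(dst_fd, src_fd, offset, remaining)
        if sent == 0:
            break
        offset += sent
        remaining -= sent
finally:
    os.close(src_fd)
    os.close(dst_fd)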
+ +from gunicorn.http.errors import (NoMoreData, ChunkMissingTerminator, + InvalidChunkSize) +from gunicorn import six + + +class ChunkedReader(object): + def __init__(self, req, unreader): + self.req = req + self.parser = self.parse_chunked(unreader) + self.buf = six.BytesIO() + + def read(self, size): + if not isinstance(size, six.integer_types): + raise TypeError("size must be an integral type") + if size < 0: + raise ValueError("Size must be positive.") + if size == 0: + return b"" + + if self.parser: + while self.buf.tell() < size: + try: + self.buf.write(six.next(self.parser)) + except StopIteration: + self.parser = None + break + + data = self.buf.getvalue() + ret, rest = data[:size], data[size:] + self.buf = six.BytesIO() + self.buf.write(rest) + return ret + + def parse_trailers(self, unreader, data): + buf = six.BytesIO() + buf.write(data) + + idx = buf.getvalue().find(b"\r\n\r\n") + done = buf.getvalue()[:2] == b"\r\n" + while idx < 0 and not done: + self.get_data(unreader, buf) + idx = buf.getvalue().find(b"\r\n\r\n") + done = buf.getvalue()[:2] == b"\r\n" + if done: + unreader.unread(buf.getvalue()[2:]) + return b"" + self.req.trailers = self.req.parse_headers(buf.getvalue()[:idx]) + unreader.unread(buf.getvalue()[idx + 4:]) + + def parse_chunked(self, unreader): + (size, rest) = self.parse_chunk_size(unreader) + while size > 0: + while size > len(rest): + size -= len(rest) + yield rest + rest = unreader.read() + if not rest: + raise NoMoreData() + yield rest[:size] + # Remove \r\n after chunk + rest = rest[size:] + while len(rest) < 2: + rest += unreader.read() + if rest[:2] != b'\r\n': + raise ChunkMissingTerminator(rest[:2]) + (size, rest) = self.parse_chunk_size(unreader, data=rest[2:]) + + def parse_chunk_size(self, unreader, data=None): + buf = six.BytesIO() + if data is not None: + buf.write(data) + + idx = buf.getvalue().find(b"\r\n") + while idx < 0: + self.get_data(unreader, buf) + idx = buf.getvalue().find(b"\r\n") + + data = buf.getvalue() + line, rest_chunk = data[:idx], data[idx + 2:] + + chunk_size = line.split(b";", 1)[0].strip() + try: + chunk_size = int(chunk_size, 16) + except ValueError: + raise InvalidChunkSize(chunk_size) + + if chunk_size == 0: + try: + self.parse_trailers(unreader, rest_chunk) + except NoMoreData: + pass + return (0, None) + return (chunk_size, rest_chunk) + + def get_data(self, unreader, buf): + data = unreader.read() + if not data: + raise NoMoreData() + buf.write(data) + + +class LengthReader(object): + def __init__(self, unreader, length): + self.unreader = unreader + self.length = length + + def read(self, size): + if not isinstance(size, six.integer_types): + raise TypeError("size must be an integral type") + + size = min(self.length, size) + if size < 0: + raise ValueError("Size must be positive.") + if size == 0: + return b"" + + buf = six.BytesIO() + data = self.unreader.read() + while data: + buf.write(data) + if buf.tell() >= size: + break + data = self.unreader.read() + + buf = buf.getvalue() + ret, rest = buf[:size], buf[size:] + self.unreader.unread(rest) + self.length -= size + return ret + + +class EOFReader(object): + def __init__(self, unreader): + self.unreader = unreader + self.buf = six.BytesIO() + self.finished = False + + def read(self, size): + if not isinstance(size, six.integer_types): + raise TypeError("size must be an integral type") + if size < 0: + raise ValueError("Size must be positive.") + if size == 0: + return b"" + + if self.finished: + data = self.buf.getvalue() + ret, rest = data[:size], data[size:] 
+ self.buf = six.BytesIO() + self.buf.write(rest) + return ret + + data = self.unreader.read() + while data: + self.buf.write(data) + if self.buf.tell() > size: + break + data = self.unreader.read() + + if not data: + self.finished = True + + data = self.buf.getvalue() + ret, rest = data[:size], data[size:] + self.buf = six.BytesIO() + self.buf.write(rest) + return ret + + +class Body(object): + def __init__(self, reader): + self.reader = reader + self.buf = six.BytesIO() + + def __iter__(self): + return self + + def __next__(self): + ret = self.readline() + if not ret: + raise StopIteration() + return ret + next = __next__ + + def getsize(self, size): + if size is None: + return six.MAXSIZE + elif not isinstance(size, six.integer_types): + raise TypeError("size must be an integral type") + elif size < 0: + return six.MAXSIZE + return size + + def read(self, size=None): + size = self.getsize(size) + if size == 0: + return b"" + + if size < self.buf.tell(): + data = self.buf.getvalue() + ret, rest = data[:size], data[size:] + self.buf = six.BytesIO() + self.buf.write(rest) + return ret + + while size > self.buf.tell(): + data = self.reader.read(1024) + if not data: + break + self.buf.write(data) + + data = self.buf.getvalue() + ret, rest = data[:size], data[size:] + self.buf = six.BytesIO() + self.buf.write(rest) + return ret + + def readline(self, size=None): + size = self.getsize(size) + if size == 0: + return b"" + + data = self.buf.getvalue() + self.buf = six.BytesIO() + + ret = [] + while 1: + idx = data.find(b"\n", 0, size) + idx = idx + 1 if idx >= 0 else size if len(data) >= size else 0 + if idx: + ret.append(data[:idx]) + self.buf.write(data[idx:]) + break + + ret.append(data) + size -= len(data) + data = self.reader.read(min(1024, size)) + if not data: + break + + return b"".join(ret) + + def readlines(self, size=None): + ret = [] + data = self.read() + while data: + pos = data.find(b"\n") + if pos < 0: + ret.append(data) + data = b"" + else: + line, data = data[:pos + 1], data[pos + 1:] + ret.append(line) + return ret diff --git a/py3/lib/python3.6/site-packages/gunicorn/http/errors.py b/py3/lib/python3.6/site-packages/gunicorn/http/errors.py new file mode 100644 index 0000000..7839ef0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/http/errors.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +# We don't need to call super() in __init__ methods of our +# BaseException and Exception classes because we also define +# our own __str__ methods so there is no need to pass 'message' +# to the base class to get a meaningful output from 'str(exc)'. 
+# pylint: disable=super-init-not-called + + +class ParseException(Exception): + pass + + +class NoMoreData(IOError): + def __init__(self, buf=None): + self.buf = buf + + def __str__(self): + return "No more data after: %r" % self.buf + + +class InvalidRequestLine(ParseException): + def __init__(self, req): + self.req = req + self.code = 400 + + def __str__(self): + return "Invalid HTTP request line: %r" % self.req + + +class InvalidRequestMethod(ParseException): + def __init__(self, method): + self.method = method + + def __str__(self): + return "Invalid HTTP method: %r" % self.method + + +class InvalidHTTPVersion(ParseException): + def __init__(self, version): + self.version = version + + def __str__(self): + return "Invalid HTTP Version: %r" % self.version + + +class InvalidHeader(ParseException): + def __init__(self, hdr, req=None): + self.hdr = hdr + self.req = req + + def __str__(self): + return "Invalid HTTP Header: %r" % self.hdr + + +class InvalidHeaderName(ParseException): + def __init__(self, hdr): + self.hdr = hdr + + def __str__(self): + return "Invalid HTTP header name: %r" % self.hdr + + +class InvalidChunkSize(IOError): + def __init__(self, data): + self.data = data + + def __str__(self): + return "Invalid chunk size: %r" % self.data + + +class ChunkMissingTerminator(IOError): + def __init__(self, term): + self.term = term + + def __str__(self): + return "Invalid chunk terminator is not '\\r\\n': %r" % self.term + + +class LimitRequestLine(ParseException): + def __init__(self, size, max_size): + self.size = size + self.max_size = max_size + + def __str__(self): + return "Request Line is too large (%s > %s)" % (self.size, self.max_size) + + +class LimitRequestHeaders(ParseException): + def __init__(self, msg): + self.msg = msg + + def __str__(self): + return self.msg + + +class InvalidProxyLine(ParseException): + def __init__(self, line): + self.line = line + self.code = 400 + + def __str__(self): + return "Invalid PROXY line: %r" % self.line + + +class ForbiddenProxyRequest(ParseException): + def __init__(self, host): + self.host = host + self.code = 403 + + def __str__(self): + return "Proxy request from %r not allowed" % self.host + + +class InvalidSchemeHeaders(ParseException): + def __str__(self): + return "Contradictory scheme headers" diff --git a/py3/lib/python3.6/site-packages/gunicorn/http/message.py b/py3/lib/python3.6/site-packages/gunicorn/http/message.py new file mode 100644 index 0000000..2700b32 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/http/message.py @@ -0,0 +1,363 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
+ +import re +import socket +from errno import ENOTCONN + +from gunicorn._compat import bytes_to_str +from gunicorn.http.unreader import SocketUnreader +from gunicorn.http.body import ChunkedReader, LengthReader, EOFReader, Body +from gunicorn.http.errors import (InvalidHeader, InvalidHeaderName, NoMoreData, + InvalidRequestLine, InvalidRequestMethod, InvalidHTTPVersion, + LimitRequestLine, LimitRequestHeaders) +from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest +from gunicorn.http.errors import InvalidSchemeHeaders +from gunicorn.six import BytesIO, string_types +from gunicorn.util import split_request_uri + +MAX_REQUEST_LINE = 8190 +MAX_HEADERS = 32768 +DEFAULT_MAX_HEADERFIELD_SIZE = 8190 + +HEADER_RE = re.compile(r"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\"]") +METH_RE = re.compile(r"[A-Z0-9$-_.]{3,20}") +VERSION_RE = re.compile(r"HTTP/(\d+)\.(\d+)") + + +class Message(object): + def __init__(self, cfg, unreader): + self.cfg = cfg + self.unreader = unreader + self.version = None + self.headers = [] + self.trailers = [] + self.body = None + self.scheme = "https" if cfg.is_ssl else "http" + + # set headers limits + self.limit_request_fields = cfg.limit_request_fields + if (self.limit_request_fields <= 0 + or self.limit_request_fields > MAX_HEADERS): + self.limit_request_fields = MAX_HEADERS + self.limit_request_field_size = cfg.limit_request_field_size + if self.limit_request_field_size < 0: + self.limit_request_field_size = DEFAULT_MAX_HEADERFIELD_SIZE + + # set max header buffer size + max_header_field_size = self.limit_request_field_size or DEFAULT_MAX_HEADERFIELD_SIZE + self.max_buffer_headers = self.limit_request_fields * \ + (max_header_field_size + 2) + 4 + + unused = self.parse(self.unreader) + self.unreader.unread(unused) + self.set_body_reader() + + def parse(self, unreader): + raise NotImplementedError() + + def parse_headers(self, data): + cfg = self.cfg + headers = [] + + # Split lines on \r\n keeping the \r\n on each line + lines = [bytes_to_str(line) + "\r\n" for line in data.split(b"\r\n")] + + # handle scheme headers + scheme_header = False + secure_scheme_headers = {} + if '*' in cfg.forwarded_allow_ips: + secure_scheme_headers = cfg.secure_scheme_headers + elif isinstance(self.unreader, SocketUnreader): + remote_addr = self.unreader.sock.getpeername() + if isinstance(remote_addr, tuple): + remote_host = remote_addr[0] + if remote_host in cfg.forwarded_allow_ips: + secure_scheme_headers = cfg.secure_scheme_headers + elif isinstance(remote_addr, string_types): + secure_scheme_headers = cfg.secure_scheme_headers + + # Parse headers into key/value pairs paying attention + # to continuation lines. + while lines: + if len(headers) >= self.limit_request_fields: + raise LimitRequestHeaders("limit request headers fields") + + # Parse initial header name : value pair. 
+ curr = lines.pop(0) + header_length = len(curr) + if curr.find(":") < 0: + raise InvalidHeader(curr.strip()) + name, value = curr.split(":", 1) + name = name.rstrip(" \t").upper() + if HEADER_RE.search(name): + raise InvalidHeaderName(name) + + name, value = name.strip(), [value.lstrip()] + + # Consume value continuation lines + while lines and lines[0].startswith((" ", "\t")): + curr = lines.pop(0) + header_length += len(curr) + if header_length > self.limit_request_field_size > 0: + raise LimitRequestHeaders("limit request headers " + + "fields size") + value.append(curr) + value = ''.join(value).rstrip() + + if header_length > self.limit_request_field_size > 0: + raise LimitRequestHeaders("limit request headers fields size") + + if name in secure_scheme_headers: + secure = value == secure_scheme_headers[name] + scheme = "https" if secure else "http" + if scheme_header: + if scheme != self.scheme: + raise InvalidSchemeHeaders() + else: + scheme_header = True + self.scheme = scheme + + headers.append((name, value)) + + return headers + + def set_body_reader(self): + chunked = False + content_length = None + for (name, value) in self.headers: + if name == "CONTENT-LENGTH": + content_length = value + elif name == "TRANSFER-ENCODING": + chunked = value.lower() == "chunked" + elif name == "SEC-WEBSOCKET-KEY1": + content_length = 8 + + if chunked: + self.body = Body(ChunkedReader(self, self.unreader)) + elif content_length is not None: + try: + content_length = int(content_length) + except ValueError: + raise InvalidHeader("CONTENT-LENGTH", req=self) + + if content_length < 0: + raise InvalidHeader("CONTENT-LENGTH", req=self) + + self.body = Body(LengthReader(self.unreader, content_length)) + else: + self.body = Body(EOFReader(self.unreader)) + + def should_close(self): + for (h, v) in self.headers: + if h == "CONNECTION": + v = v.lower().strip() + if v == "close": + return True + elif v == "keep-alive": + return False + break + return self.version <= (1, 0) + + +class Request(Message): + def __init__(self, cfg, unreader, req_number=1): + self.method = None + self.uri = None + self.path = None + self.query = None + self.fragment = None + + # get max request line size + self.limit_request_line = cfg.limit_request_line + if (self.limit_request_line < 0 + or self.limit_request_line >= MAX_REQUEST_LINE): + self.limit_request_line = MAX_REQUEST_LINE + + self.req_number = req_number + self.proxy_protocol_info = None + super(Request, self).__init__(cfg, unreader) + + def get_data(self, unreader, buf, stop=False): + data = unreader.read() + if not data: + if stop: + raise StopIteration() + raise NoMoreData(buf.getvalue()) + buf.write(data) + + def parse(self, unreader): + buf = BytesIO() + self.get_data(unreader, buf, stop=True) + + # get request line + line, rbuf = self.read_line(unreader, buf, self.limit_request_line) + + # proxy protocol + if self.proxy_protocol(bytes_to_str(line)): + # get next request line + buf = BytesIO() + buf.write(rbuf) + line, rbuf = self.read_line(unreader, buf, self.limit_request_line) + + self.parse_request_line(line) + buf = BytesIO() + buf.write(rbuf) + + # Headers + data = buf.getvalue() + idx = data.find(b"\r\n\r\n") + + done = data[:2] == b"\r\n" + while True: + idx = data.find(b"\r\n\r\n") + done = data[:2] == b"\r\n" + + if idx < 0 and not done: + self.get_data(unreader, buf) + data = buf.getvalue() + if len(data) > self.max_buffer_headers: + raise LimitRequestHeaders("max buffer headers") + else: + break + + if done: + self.unreader.unread(data[2:]) + return 
b"" + + self.headers = self.parse_headers(data[:idx]) + + ret = data[idx + 4:] + buf = None + return ret + + def read_line(self, unreader, buf, limit=0): + data = buf.getvalue() + + while True: + idx = data.find(b"\r\n") + if idx >= 0: + # check if the request line is too large + if idx > limit > 0: + raise LimitRequestLine(idx, limit) + break + elif len(data) - 2 > limit > 0: + raise LimitRequestLine(len(data), limit) + self.get_data(unreader, buf) + data = buf.getvalue() + + return (data[:idx], # request line, + data[idx + 2:]) # residue in the buffer, skip \r\n + + def proxy_protocol(self, line): + """\ + Detect, check and parse proxy protocol. + + :raises: ForbiddenProxyRequest, InvalidProxyLine. + :return: True for proxy protocol line else False + """ + if not self.cfg.proxy_protocol: + return False + + if self.req_number != 1: + return False + + if not line.startswith("PROXY"): + return False + + self.proxy_protocol_access_check() + self.parse_proxy_protocol(line) + + return True + + def proxy_protocol_access_check(self): + # check in allow list + if isinstance(self.unreader, SocketUnreader): + try: + remote_host = self.unreader.sock.getpeername()[0] + except socket.error as e: + if e.args[0] == ENOTCONN: + raise ForbiddenProxyRequest("UNKNOW") + raise + if ("*" not in self.cfg.proxy_allow_ips and + remote_host not in self.cfg.proxy_allow_ips): + raise ForbiddenProxyRequest(remote_host) + + def parse_proxy_protocol(self, line): + bits = line.split() + + if len(bits) != 6: + raise InvalidProxyLine(line) + + # Extract data + proto = bits[1] + s_addr = bits[2] + d_addr = bits[3] + + # Validation + if proto not in ["TCP4", "TCP6"]: + raise InvalidProxyLine("protocol '%s' not supported" % proto) + if proto == "TCP4": + try: + socket.inet_pton(socket.AF_INET, s_addr) + socket.inet_pton(socket.AF_INET, d_addr) + except socket.error: + raise InvalidProxyLine(line) + elif proto == "TCP6": + try: + socket.inet_pton(socket.AF_INET6, s_addr) + socket.inet_pton(socket.AF_INET6, d_addr) + except socket.error: + raise InvalidProxyLine(line) + + try: + s_port = int(bits[4]) + d_port = int(bits[5]) + except ValueError: + raise InvalidProxyLine("invalid port %s" % line) + + if not ((0 <= s_port <= 65535) and (0 <= d_port <= 65535)): + raise InvalidProxyLine("invalid port %s" % line) + + # Set data + self.proxy_protocol_info = { + "proxy_protocol": proto, + "client_addr": s_addr, + "client_port": s_port, + "proxy_addr": d_addr, + "proxy_port": d_port + } + + def parse_request_line(self, line_bytes): + bits = [bytes_to_str(bit) for bit in line_bytes.split(None, 2)] + if len(bits) != 3: + raise InvalidRequestLine(bytes_to_str(line_bytes)) + + # Method + if not METH_RE.match(bits[0]): + raise InvalidRequestMethod(bits[0]) + self.method = bits[0].upper() + + # URI + self.uri = bits[1] + + try: + parts = split_request_uri(self.uri) + except ValueError: + raise InvalidRequestLine(bytes_to_str(line_bytes)) + self.path = parts.path or "" + self.query = parts.query or "" + self.fragment = parts.fragment or "" + + # Version + match = VERSION_RE.match(bits[2]) + if match is None: + raise InvalidHTTPVersion(bits[2]) + self.version = (int(match.group(1)), int(match.group(2))) + + def set_body_reader(self): + super(Request, self).set_body_reader() + if isinstance(self.body.reader, EOFReader): + self.body = Body(LengthReader(self.unreader, 0)) diff --git a/py3/lib/python3.6/site-packages/gunicorn/http/parser.py b/py3/lib/python3.6/site-packages/gunicorn/http/parser.py new file mode 100644 index 0000000..a4a0f1e --- 
/dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/http/parser.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +from gunicorn.http.message import Request +from gunicorn.http.unreader import SocketUnreader, IterUnreader + + +class Parser(object): + + mesg_class = None + + def __init__(self, cfg, source): + self.cfg = cfg + if hasattr(source, "recv"): + self.unreader = SocketUnreader(source) + else: + self.unreader = IterUnreader(source) + self.mesg = None + + # request counter (for keepalive connetions) + self.req_count = 0 + + def __iter__(self): + return self + + def __next__(self): + # Stop if HTTP dictates a stop. + if self.mesg and self.mesg.should_close(): + raise StopIteration() + + # Discard any unread body of the previous message + if self.mesg: + data = self.mesg.body.read(8192) + while data: + data = self.mesg.body.read(8192) + + # Parse the next request + self.req_count += 1 + self.mesg = self.mesg_class(self.cfg, self.unreader, self.req_count) + if not self.mesg: + raise StopIteration() + return self.mesg + + next = __next__ + + +class RequestParser(Parser): + + mesg_class = Request diff --git a/py3/lib/python3.6/site-packages/gunicorn/http/unreader.py b/py3/lib/python3.6/site-packages/gunicorn/http/unreader.py new file mode 100644 index 0000000..9f312a8 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/http/unreader.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import os + +from gunicorn import six + +# Classes that can undo reading data from +# a given type of data source. + + +class Unreader(object): + def __init__(self): + self.buf = six.BytesIO() + + def chunk(self): + raise NotImplementedError() + + def read(self, size=None): + if size is not None and not isinstance(size, six.integer_types): + raise TypeError("size parameter must be an int or long.") + + if size is not None: + if size == 0: + return b"" + if size < 0: + size = None + + self.buf.seek(0, os.SEEK_END) + + if size is None and self.buf.tell(): + ret = self.buf.getvalue() + self.buf = six.BytesIO() + return ret + if size is None: + d = self.chunk() + return d + + while self.buf.tell() < size: + chunk = self.chunk() + if not chunk: + ret = self.buf.getvalue() + self.buf = six.BytesIO() + return ret + self.buf.write(chunk) + data = self.buf.getvalue() + self.buf = six.BytesIO() + self.buf.write(data[size:]) + return data[:size] + + def unread(self, data): + self.buf.seek(0, os.SEEK_END) + self.buf.write(data) + + +class SocketUnreader(Unreader): + def __init__(self, sock, max_chunk=8192): + super(SocketUnreader, self).__init__() + self.sock = sock + self.mxchunk = max_chunk + + def chunk(self): + return self.sock.recv(self.mxchunk) + + +class IterUnreader(Unreader): + def __init__(self, iterable): + super(IterUnreader, self).__init__() + self.iter = iter(iterable) + + def chunk(self): + if not self.iter: + return b"" + try: + return six.next(self.iter) + except StopIteration: + self.iter = None + return b"" diff --git a/py3/lib/python3.6/site-packages/gunicorn/http/wsgi.py b/py3/lib/python3.6/site-packages/gunicorn/http/wsgi.py new file mode 100644 index 0000000..ff75974 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/http/wsgi.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. 
+# See the NOTICE for more information. + +import io +import logging +import os +import re +import sys + +from gunicorn._compat import unquote_to_wsgi_str +from gunicorn.http.message import HEADER_RE +from gunicorn.http.errors import InvalidHeader, InvalidHeaderName +from gunicorn.six import string_types, binary_type, reraise +from gunicorn import SERVER_SOFTWARE +import gunicorn.util as util + +try: + # Python 3.3 has os.sendfile(). + from os import sendfile +except ImportError: + try: + from ._sendfile import sendfile + except ImportError: + sendfile = None + +# Send files in at most 1GB blocks as some operating systems can have problems +# with sending files in blocks over 2GB. +BLKSIZE = 0x3FFFFFFF + +HEADER_VALUE_RE = re.compile(r'[\x00-\x1F\x7F]') + +log = logging.getLogger(__name__) + + +class FileWrapper(object): + + def __init__(self, filelike, blksize=8192): + self.filelike = filelike + self.blksize = blksize + if hasattr(filelike, 'close'): + self.close = filelike.close + + def __getitem__(self, key): + data = self.filelike.read(self.blksize) + if data: + return data + raise IndexError + + +class WSGIErrorsWrapper(io.RawIOBase): + + def __init__(self, cfg): + # There is no public __init__ method for RawIOBase so + # we don't need to call super() in the __init__ method. + # pylint: disable=super-init-not-called + errorlog = logging.getLogger("gunicorn.error") + handlers = errorlog.handlers + self.streams = [] + + if cfg.errorlog == "-": + self.streams.append(sys.stderr) + handlers = handlers[1:] + + for h in handlers: + if hasattr(h, "stream"): + self.streams.append(h.stream) + + def write(self, data): + for stream in self.streams: + try: + stream.write(data) + except UnicodeError: + stream.write(data.encode("UTF-8")) + stream.flush() + + +def base_environ(cfg): + return { + "wsgi.errors": WSGIErrorsWrapper(cfg), + "wsgi.version": (1, 0), + "wsgi.multithread": False, + "wsgi.multiprocess": (cfg.workers > 1), + "wsgi.run_once": False, + "wsgi.file_wrapper": FileWrapper, + "SERVER_SOFTWARE": SERVER_SOFTWARE, + } + + +def default_environ(req, sock, cfg): + env = base_environ(cfg) + env.update({ + "wsgi.input": req.body, + "gunicorn.socket": sock, + "REQUEST_METHOD": req.method, + "QUERY_STRING": req.query, + "RAW_URI": req.uri, + "SERVER_PROTOCOL": "HTTP/%s" % ".".join([str(v) for v in req.version]) + }) + return env + + +def proxy_environ(req): + info = req.proxy_protocol_info + + if not info: + return {} + + return { + "PROXY_PROTOCOL": info["proxy_protocol"], + "REMOTE_ADDR": info["client_addr"], + "REMOTE_PORT": str(info["client_port"]), + "PROXY_ADDR": info["proxy_addr"], + "PROXY_PORT": str(info["proxy_port"]), + } + + +def create(req, sock, client, server, cfg): + resp = Response(req, sock, cfg) + + # set initial environ + environ = default_environ(req, sock, cfg) + + # default variables + host = None + script_name = os.environ.get("SCRIPT_NAME", "") + + # add the headers to the environ + for hdr_name, hdr_value in req.headers: + if hdr_name == "EXPECT": + # handle expect + if hdr_value.lower() == "100-continue": + sock.send(b"HTTP/1.1 100 Continue\r\n\r\n") + elif hdr_name == 'HOST': + host = hdr_value + elif hdr_name == "SCRIPT_NAME": + script_name = hdr_value + elif hdr_name == "CONTENT-TYPE": + environ['CONTENT_TYPE'] = hdr_value + continue + elif hdr_name == "CONTENT-LENGTH": + environ['CONTENT_LENGTH'] = hdr_value + continue + + key = 'HTTP_' + hdr_name.replace('-', '_') + if key in environ: + hdr_value = "%s,%s" % (environ[key], hdr_value) + environ[key] = hdr_value + 
+ # set the url scheme + environ['wsgi.url_scheme'] = req.scheme + + # set the REMOTE_* keys in environ + # authors should be aware that REMOTE_HOST and REMOTE_ADDR + # may not qualify the remote addr: + # http://www.ietf.org/rfc/rfc3875 + if isinstance(client, string_types): + environ['REMOTE_ADDR'] = client + elif isinstance(client, binary_type): + environ['REMOTE_ADDR'] = client.decode() + else: + environ['REMOTE_ADDR'] = client[0] + environ['REMOTE_PORT'] = str(client[1]) + + # handle the SERVER_* + # Normally only the application should use the Host header but since the + # WSGI spec doesn't support unix sockets, we are using it to create + # viable SERVER_* if possible. + if isinstance(server, string_types): + server = server.split(":") + if len(server) == 1: + # unix socket + if host: + server = host.split(':') + if len(server) == 1: + if req.scheme == "http": + server.append(80) + elif req.scheme == "https": + server.append(443) + else: + server.append('') + else: + # no host header given which means that we are not behind a + # proxy, so append an empty port. + server.append('') + environ['SERVER_NAME'] = server[0] + environ['SERVER_PORT'] = str(server[1]) + + # set the path and script name + path_info = req.path + if script_name: + path_info = path_info.split(script_name, 1)[1] + environ['PATH_INFO'] = unquote_to_wsgi_str(path_info) + environ['SCRIPT_NAME'] = script_name + + # override the environ with the correct remote and server address if + # we are behind a proxy using the proxy protocol. + environ.update(proxy_environ(req)) + return resp, environ + + +class Response(object): + + def __init__(self, req, sock, cfg): + self.req = req + self.sock = sock + self.version = SERVER_SOFTWARE + self.status = None + self.chunked = False + self.must_close = False + self.headers = [] + self.headers_sent = False + self.response_length = None + self.sent = 0 + self.upgrade = False + self.cfg = cfg + + def force_close(self): + self.must_close = True + + def should_close(self): + if self.must_close or self.req.should_close(): + return True + if self.response_length is not None or self.chunked: + return False + if self.req.method == 'HEAD': + return False + if self.status_code < 200 or self.status_code in (204, 304): + return False + return True + + def start_response(self, status, headers, exc_info=None): + if exc_info: + try: + if self.status and self.headers_sent: + reraise(exc_info[0], exc_info[1], exc_info[2]) + finally: + exc_info = None + elif self.status is not None: + raise AssertionError("Response headers already set!") + + self.status = status + + # get the status code from the response here so we can use it to check + # the need for the connection header later without parsing the string + # each time. 
+ try: + self.status_code = int(self.status.split()[0]) + except ValueError: + self.status_code = None + + self.process_headers(headers) + self.chunked = self.is_chunked() + return self.write + + def process_headers(self, headers): + for name, value in headers: + if not isinstance(name, string_types): + raise TypeError('%r is not a string' % name) + + if HEADER_RE.search(name): + raise InvalidHeaderName('%r' % name) + + if HEADER_VALUE_RE.search(value): + raise InvalidHeader('%r' % value) + + value = str(value).strip() + lname = name.lower().strip() + if lname == "content-length": + self.response_length = int(value) + elif util.is_hoppish(name): + if lname == "connection": + # handle websocket + if value.lower().strip() == "upgrade": + self.upgrade = True + elif lname == "upgrade": + if value.lower().strip() == "websocket": + self.headers.append((name.strip(), value)) + + # ignore hopbyhop headers + continue + self.headers.append((name.strip(), value)) + + def is_chunked(self): + # Only use chunked responses when the client is + # speaking HTTP/1.1 or newer and there was + # no Content-Length header set. + if self.response_length is not None: + return False + elif self.req.version <= (1, 0): + return False + elif self.req.method == 'HEAD': + # Responses to a HEAD request MUST NOT contain a response body. + return False + elif self.status_code in (204, 304): + # Do not use chunked responses when the response is guaranteed to + # not have a response body. + return False + return True + + def default_headers(self): + # set the connection header + if self.upgrade: + connection = "upgrade" + elif self.should_close(): + connection = "close" + else: + connection = "keep-alive" + + headers = [ + "HTTP/%s.%s %s\r\n" % (self.req.version[0], + self.req.version[1], self.status), + "Server: %s\r\n" % self.version, + "Date: %s\r\n" % util.http_date(), + "Connection: %s\r\n" % connection + ] + if self.chunked: + headers.append("Transfer-Encoding: chunked\r\n") + return headers + + def send_headers(self): + if self.headers_sent: + return + tosend = self.default_headers() + tosend.extend(["%s: %s\r\n" % (k, v) for k, v in self.headers]) + + header_str = "%s\r\n" % "".join(tosend) + util.write(self.sock, util.to_bytestring(header_str, "ascii")) + self.headers_sent = True + + def write(self, arg): + self.send_headers() + if not isinstance(arg, binary_type): + raise TypeError('%r is not a byte' % arg) + arglen = len(arg) + tosend = arglen + if self.response_length is not None: + if self.sent >= self.response_length: + # Never write more than self.response_length bytes + return + + tosend = min(self.response_length - self.sent, tosend) + if tosend < arglen: + arg = arg[:tosend] + + # Sending an empty chunk signals the end of the + # response and prematurely closes the response + if self.chunked and tosend == 0: + return + + self.sent += tosend + util.write(self.sock, arg, self.chunked) + + def can_sendfile(self): + return self.cfg.sendfile is not False and sendfile is not None + + def sendfile(self, respiter): + if self.cfg.is_ssl or not self.can_sendfile(): + return False + + if not util.has_fileno(respiter.filelike): + return False + + fileno = respiter.filelike.fileno() + try: + offset = os.lseek(fileno, 0, os.SEEK_CUR) + if self.response_length is None: + filesize = os.fstat(fileno).st_size + + # The file may be special and sendfile will fail. + # It may also be zero-length, but that is okay. 
+ if filesize == 0: + return False + + nbytes = filesize - offset + else: + nbytes = self.response_length + except (OSError, io.UnsupportedOperation): + return False + + self.send_headers() + + if self.is_chunked(): + chunk_size = "%X\r\n" % nbytes + self.sock.sendall(chunk_size.encode('utf-8')) + + sockno = self.sock.fileno() + sent = 0 + + while sent != nbytes: + count = min(nbytes - sent, BLKSIZE) + sent += sendfile(sockno, fileno, offset + sent, count) + + if self.is_chunked(): + self.sock.sendall(b"\r\n") + + os.lseek(fileno, offset, os.SEEK_SET) + + return True + + def write_file(self, respiter): + if not self.sendfile(respiter): + for item in respiter: + self.write(item) + + def close(self): + if not self.headers_sent: + self.send_headers() + if self.chunked: + util.write_chunk(self.sock, b"") diff --git a/py3/lib/python3.6/site-packages/gunicorn/instrument/__init__.py b/py3/lib/python3.6/site-packages/gunicorn/instrument/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/py3/lib/python3.6/site-packages/gunicorn/instrument/statsd.py b/py3/lib/python3.6/site-packages/gunicorn/instrument/statsd.py new file mode 100644 index 0000000..4bbcb20 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/instrument/statsd.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +"Bare-bones implementation of statsD's protocol, client-side" + +import socket +import logging +from re import sub + +from gunicorn.glogging import Logger +from gunicorn import six + +# Instrumentation constants +METRIC_VAR = "metric" +VALUE_VAR = "value" +MTYPE_VAR = "mtype" +GAUGE_TYPE = "gauge" +COUNTER_TYPE = "counter" +HISTOGRAM_TYPE = "histogram" + +class Statsd(Logger): + """statsD-based instrumentation, that passes as a logger + """ + def __init__(self, cfg): + """host, port: statsD server + """ + Logger.__init__(self, cfg) + self.prefix = sub(r"^(.+[^.]+)\.*$", "\\g<1>.", cfg.statsd_prefix) + try: + host, port = cfg.statsd_host + self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + self.sock.connect((host, int(port))) + except Exception: + self.sock = None + + # Log errors and warnings + def critical(self, msg, *args, **kwargs): + Logger.critical(self, msg, *args, **kwargs) + self.increment("gunicorn.log.critical", 1) + + def error(self, msg, *args, **kwargs): + Logger.error(self, msg, *args, **kwargs) + self.increment("gunicorn.log.error", 1) + + def warning(self, msg, *args, **kwargs): + Logger.warning(self, msg, *args, **kwargs) + self.increment("gunicorn.log.warning", 1) + + def exception(self, msg, *args, **kwargs): + Logger.exception(self, msg, *args, **kwargs) + self.increment("gunicorn.log.exception", 1) + + # Special treatement for info, the most common log level + def info(self, msg, *args, **kwargs): + self.log(logging.INFO, msg, *args, **kwargs) + + # skip the run-of-the-mill logs + def debug(self, msg, *args, **kwargs): + self.log(logging.DEBUG, msg, *args, **kwargs) + + def log(self, lvl, msg, *args, **kwargs): + """Log a given statistic if metric, value and type are present + """ + try: + extra = kwargs.get("extra", None) + if extra is not None: + metric = extra.get(METRIC_VAR, None) + value = extra.get(VALUE_VAR, None) + typ = extra.get(MTYPE_VAR, None) + if metric and value and typ: + if typ == GAUGE_TYPE: + self.gauge(metric, value) + elif typ == COUNTER_TYPE: + self.increment(metric, value) + elif typ == HISTOGRAM_TYPE: + self.histogram(metric, value) + else: + 
pass + + # Log to parent logger only if there is something to say + if msg: + Logger.log(self, lvl, msg, *args, **kwargs) + except Exception: + Logger.warning(self, "Failed to log to statsd", exc_info=True) + + # access logging + def access(self, resp, req, environ, request_time): + """Measure request duration + request_time is a datetime.timedelta + """ + Logger.access(self, resp, req, environ, request_time) + duration_in_ms = request_time.seconds * 1000 + float(request_time.microseconds) / 10 ** 3 + status = resp.status + if isinstance(status, str): + status = int(status.split(None, 1)[0]) + self.histogram("gunicorn.request.duration", duration_in_ms) + self.increment("gunicorn.requests", 1) + self.increment("gunicorn.request.status.%d" % status, 1) + + # statsD methods + # you can use those directly if you want + def gauge(self, name, value): + self._sock_send("{0}{1}:{2}|g".format(self.prefix, name, value)) + + def increment(self, name, value, sampling_rate=1.0): + self._sock_send("{0}{1}:{2}|c|@{3}".format(self.prefix, name, value, sampling_rate)) + + def decrement(self, name, value, sampling_rate=1.0): + self._sock_send("{0}{1}:-{2}|c|@{3}".format(self.prefix, name, value, sampling_rate)) + + def histogram(self, name, value): + self._sock_send("{0}{1}:{2}|ms".format(self.prefix, name, value)) + + def _sock_send(self, msg): + try: + if isinstance(msg, six.text_type): + msg = msg.encode("ascii") + if self.sock: + self.sock.send(msg) + except Exception: + Logger.warning(self, "Error sending message to statsd", exc_info=True) diff --git a/py3/lib/python3.6/site-packages/gunicorn/pidfile.py b/py3/lib/python3.6/site-packages/gunicorn/pidfile.py new file mode 100644 index 0000000..a6e085f --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/pidfile.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import errno +import os +import tempfile + + +class Pidfile(object): + """\ + Manage a PID file. If a specific name is provided + it and '"%s.oldpid" % name' will be used. Otherwise + we create a temp file using os.mkstemp. + """ + + def __init__(self, fname): + self.fname = fname + self.pid = None + + def create(self, pid): + oldpid = self.validate() + if oldpid: + if oldpid == os.getpid(): + return + msg = "Already running on PID %s (or pid file '%s' is stale)" + raise RuntimeError(msg % (oldpid, self.fname)) + + self.pid = pid + + # Write pidfile + fdir = os.path.dirname(self.fname) + if fdir and not os.path.isdir(fdir): + raise RuntimeError("%s doesn't exist. Can't create pidfile." 
% fdir) + fd, fname = tempfile.mkstemp(dir=fdir) + os.write(fd, ("%s\n" % self.pid).encode('utf-8')) + if self.fname: + os.rename(fname, self.fname) + else: + self.fname = fname + os.close(fd) + + # set permissions to -rw-r--r-- + os.chmod(self.fname, 420) + + def rename(self, path): + self.unlink() + self.fname = path + self.create(self.pid) + + def unlink(self): + """ delete pidfile""" + try: + with open(self.fname, "r") as f: + pid1 = int(f.read() or 0) + + if pid1 == self.pid: + os.unlink(self.fname) + except: + pass + + def validate(self): + """ Validate pidfile and make it stale if needed""" + if not self.fname: + return + try: + with open(self.fname, "r") as f: + try: + wpid = int(f.read()) + except ValueError: + return + + try: + os.kill(wpid, 0) + return wpid + except OSError as e: + if e.args[0] == errno.EPERM: + return wpid + if e.args[0] == errno.ESRCH: + return + raise + except IOError as e: + if e.args[0] == errno.ENOENT: + return + raise diff --git a/py3/lib/python3.6/site-packages/gunicorn/reloader.py b/py3/lib/python3.6/site-packages/gunicorn/reloader.py new file mode 100644 index 0000000..c879885 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/reloader.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import os +import os.path +import re +import sys +import time +import threading + +COMPILED_EXT_RE = re.compile(r'py[co]$') + + +class Reloader(threading.Thread): + def __init__(self, extra_files=None, interval=1, callback=None): + super(Reloader, self).__init__() + self.setDaemon(True) + self._extra_files = set(extra_files or ()) + self._extra_files_lock = threading.RLock() + self._interval = interval + self._callback = callback + + def add_extra_file(self, filename): + with self._extra_files_lock: + self._extra_files.add(filename) + + def get_files(self): + fnames = [ + COMPILED_EXT_RE.sub('py', module.__file__) + for module in tuple(sys.modules.values()) + if getattr(module, '__file__', None) + ] + + with self._extra_files_lock: + fnames.extend(self._extra_files) + + return fnames + + def run(self): + mtimes = {} + while True: + for filename in self.get_files(): + try: + mtime = os.stat(filename).st_mtime + except OSError: + continue + old_time = mtimes.get(filename) + if old_time is None: + mtimes[filename] = mtime + continue + elif mtime > old_time: + if self._callback: + self._callback(filename) + time.sleep(self._interval) + +has_inotify = False +if sys.platform.startswith('linux'): + try: + from inotify.adapters import Inotify + import inotify.constants + has_inotify = True + except ImportError: + pass + + +if has_inotify: + + class InotifyReloader(threading.Thread): + event_mask = (inotify.constants.IN_CREATE | inotify.constants.IN_DELETE + | inotify.constants.IN_DELETE_SELF | inotify.constants.IN_MODIFY + | inotify.constants.IN_MOVE_SELF | inotify.constants.IN_MOVED_FROM + | inotify.constants.IN_MOVED_TO) + + def __init__(self, extra_files=None, callback=None): + super(InotifyReloader, self).__init__() + self.setDaemon(True) + self._callback = callback + self._dirs = set() + self._watcher = Inotify() + + for extra_file in extra_files: + self.add_extra_file(extra_file) + + def add_extra_file(self, filename): + dirname = os.path.dirname(filename) + + if dirname in self._dirs: + return + + self._watcher.add_watch(dirname, mask=self.event_mask) + self._dirs.add(dirname) + + def get_dirs(self): + fnames = [ + os.path.dirname(COMPILED_EXT_RE.sub('py', 
module.__file__)) + for module in tuple(sys.modules.values()) + if hasattr(module, '__file__') + ] + + return set(fnames) + + def run(self): + self._dirs = self.get_dirs() + + for dirname in self._dirs: + self._watcher.add_watch(dirname, mask=self.event_mask) + + for event in self._watcher.event_gen(): + if event is None: + continue + + filename = event[3] + + self._callback(filename) + +else: + + class InotifyReloader(object): + def __init__(self, callback=None): + raise ImportError('You must have the inotify module installed to ' + 'use the inotify reloader') + + +preferred_reloader = InotifyReloader if has_inotify else Reloader + +reloader_engines = { + 'auto': preferred_reloader, + 'poll': Reloader, + 'inotify': InotifyReloader, +} diff --git a/py3/lib/python3.6/site-packages/gunicorn/selectors.py b/py3/lib/python3.6/site-packages/gunicorn/selectors.py new file mode 100644 index 0000000..cdae569 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/selectors.py @@ -0,0 +1,592 @@ +"""Selectors module. + +This module allows high-level and efficient I/O multiplexing, built upon the +`select` module primitives. + +The following code adapted from trollius.selectors. +""" + + +from abc import ABCMeta, abstractmethod +from collections import namedtuple, Mapping +import math +import select +import sys + +from gunicorn._compat import wrap_error, InterruptedError +from gunicorn import six + + +# generic events, that must be mapped to implementation-specific ones +EVENT_READ = (1 << 0) +EVENT_WRITE = (1 << 1) + + +def _fileobj_to_fd(fileobj): + """Return a file descriptor from a file object. + + Parameters: + fileobj -- file object or file descriptor + + Returns: + corresponding file descriptor + + Raises: + ValueError if the object is invalid + """ + if isinstance(fileobj, six.integer_types): + fd = fileobj + else: + try: + fd = int(fileobj.fileno()) + except (AttributeError, TypeError, ValueError): + raise ValueError("Invalid file object: " + "{0!r}".format(fileobj)) + if fd < 0: + raise ValueError("Invalid file descriptor: {0}".format(fd)) + return fd + + +SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data']) +"""Object used to associate a file object to its backing file descriptor, +selected event mask and attached data.""" + + +class _SelectorMapping(Mapping): + """Mapping of file objects to selector keys.""" + + def __init__(self, selector): + self._selector = selector + + def __len__(self): + return len(self._selector._fd_to_key) + + def __getitem__(self, fileobj): + try: + fd = self._selector._fileobj_lookup(fileobj) + return self._selector._fd_to_key[fd] + except KeyError: + raise KeyError("{0!r} is not registered".format(fileobj)) + + def __iter__(self): + return iter(self._selector._fd_to_key) + + +class BaseSelector(six.with_metaclass(ABCMeta)): + """Selector abstract base class. + + A selector supports registering file objects to be monitored for specific + I/O events. + + A file object is a file descriptor or any object with a `fileno()` method. + An arbitrary object can be attached to the file object, which can be used + for example to store context information, a callback, etc. + + A selector can use various implementations (select(), poll(), epoll()...) + depending on the platform. The default `Selector` class uses the most + efficient implementation on the current platform. + """ + + @abstractmethod + def register(self, fileobj, events, data=None): + """Register a file object. 
+ + Parameters: + fileobj -- file object or file descriptor + events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) + data -- attached data + + Returns: + SelectorKey instance + + Raises: + ValueError if events is invalid + KeyError if fileobj is already registered + OSError if fileobj is closed or otherwise is unacceptable to + the underlying system call (if a system call is made) + + Note: + OSError may or may not be raised + """ + raise NotImplementedError + + @abstractmethod + def unregister(self, fileobj): + """Unregister a file object. + + Parameters: + fileobj -- file object or file descriptor + + Returns: + SelectorKey instance + + Raises: + KeyError if fileobj is not registered + + Note: + If fileobj is registered but has since been closed this does + *not* raise OSError (even if the wrapped syscall does) + """ + raise NotImplementedError + + def modify(self, fileobj, events, data=None): + """Change a registered file object monitored events or attached data. + + Parameters: + fileobj -- file object or file descriptor + events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) + data -- attached data + + Returns: + SelectorKey instance + + Raises: + Anything that unregister() or register() raises + """ + self.unregister(fileobj) + return self.register(fileobj, events, data) + + @abstractmethod + def select(self, timeout=None): + """Perform the actual selection, until some monitored file objects are + ready or a timeout expires. + + Parameters: + timeout -- if timeout > 0, this specifies the maximum wait time, in + seconds + if timeout <= 0, the select() call won't block, and will + report the currently ready file objects + if timeout is None, select() will block until a monitored + file object becomes ready + + Returns: + list of (key, events) for ready file objects + `events` is a bitwise mask of EVENT_READ|EVENT_WRITE + """ + raise NotImplementedError + + def close(self): + """Close the selector. + + This must be called to make sure that any underlying resource is freed. + """ + pass + + def get_key(self, fileobj): + """Return the key associated to a registered file object. + + Returns: + SelectorKey for this file object + """ + mapping = self.get_map() + try: + return mapping[fileobj] + except KeyError: + raise KeyError("{0!r} is not registered".format(fileobj)) + + @abstractmethod + def get_map(self): + """Return a mapping of file objects to selector keys.""" + raise NotImplementedError + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + +class _BaseSelectorImpl(BaseSelector): + """Base selector implementation.""" + + def __init__(self): + # this maps file descriptors to keys + self._fd_to_key = {} + # read-only mapping returned by get_map() + self._map = _SelectorMapping(self) + + def _fileobj_lookup(self, fileobj): + """Return a file descriptor from a file object. + + This wraps _fileobj_to_fd() to do an exhaustive search in case + the object is invalid but we still have it in our map. This + is used by unregister() so we can unregister an object that + was previously registered even if it is closed. It is also + used by _SelectorMapping. + """ + try: + return _fileobj_to_fd(fileobj) + except ValueError: + # Do an exhaustive search. + for key in self._fd_to_key.values(): + if key.fileobj is fileobj: + return key.fd + # Raise ValueError after all. 
+ raise + + def register(self, fileobj, events, data=None): + if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)): + raise ValueError("Invalid events: {0!r}".format(events)) + + key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data) + + if key.fd in self._fd_to_key: + raise KeyError("{0!r} (FD {1}) is already registered" + .format(fileobj, key.fd)) + + self._fd_to_key[key.fd] = key + return key + + def unregister(self, fileobj): + try: + key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) + except KeyError: + raise KeyError("{0!r} is not registered".format(fileobj)) + return key + + def modify(self, fileobj, events, data=None): + # TODO: Subclasses can probably optimize this even further. + try: + key = self._fd_to_key[self._fileobj_lookup(fileobj)] + except KeyError: + raise KeyError("{0!r} is not registered".format(fileobj)) + if events != key.events: + self.unregister(fileobj) + key = self.register(fileobj, events, data) + elif data != key.data: + # Use a shortcut to update the data. + key = key._replace(data=data) + self._fd_to_key[key.fd] = key + return key + + def close(self): + self._fd_to_key.clear() + + def get_map(self): + return self._map + + def _key_from_fd(self, fd): + """Return the key associated to a given file descriptor. + + Parameters: + fd -- file descriptor + + Returns: + corresponding key, or None if not found + """ + try: + return self._fd_to_key[fd] + except KeyError: + return None + + +class SelectSelector(_BaseSelectorImpl): + """Select-based selector.""" + + def __init__(self): + super(SelectSelector, self).__init__() + self._readers = set() + self._writers = set() + + def register(self, fileobj, events, data=None): + key = super(SelectSelector, self).register(fileobj, events, data) + if events & EVENT_READ: + self._readers.add(key.fd) + if events & EVENT_WRITE: + self._writers.add(key.fd) + return key + + def unregister(self, fileobj): + key = super(SelectSelector, self).unregister(fileobj) + self._readers.discard(key.fd) + self._writers.discard(key.fd) + return key + + if sys.platform == 'win32': + def _select(self, r, w, _, timeout=None): + r, w, x = select.select(r, w, w, timeout) + return r, w + x, [] + else: + _select = select.select + + def select(self, timeout=None): + timeout = None if timeout is None else max(timeout, 0) + ready = [] + try: + r, w, _ = wrap_error(self._select, + self._readers, self._writers, [], timeout) + except InterruptedError: + return ready + r = set(r) + w = set(w) + for fd in r | w: + events = 0 + if fd in r: + events |= EVENT_READ + if fd in w: + events |= EVENT_WRITE + + key = self._key_from_fd(fd) + if key: + ready.append((key, events & key.events)) + return ready + + +if hasattr(select, 'poll'): + + class PollSelector(_BaseSelectorImpl): + """Poll-based selector.""" + + def __init__(self): + super(PollSelector, self).__init__() + self._poll = select.poll() + + def register(self, fileobj, events, data=None): + key = super(PollSelector, self).register(fileobj, events, data) + poll_events = 0 + if events & EVENT_READ: + poll_events |= select.POLLIN + if events & EVENT_WRITE: + poll_events |= select.POLLOUT + self._poll.register(key.fd, poll_events) + return key + + def unregister(self, fileobj): + key = super(PollSelector, self).unregister(fileobj) + self._poll.unregister(key.fd) + return key + + def select(self, timeout=None): + if timeout is None: + timeout = None + elif timeout <= 0: + timeout = 0 + else: + # poll() has a resolution of 1 millisecond, round away from + # zero to wait *at least* 
timeout seconds. + timeout = int(math.ceil(timeout * 1e3)) + ready = [] + try: + fd_event_list = wrap_error(self._poll.poll, timeout) + except InterruptedError: + return ready + for fd, event in fd_event_list: + events = 0 + if event & ~select.POLLIN: + events |= EVENT_WRITE + if event & ~select.POLLOUT: + events |= EVENT_READ + + key = self._key_from_fd(fd) + if key: + ready.append((key, events & key.events)) + return ready + + +if hasattr(select, 'epoll'): + + class EpollSelector(_BaseSelectorImpl): + """Epoll-based selector.""" + + def __init__(self): + super(EpollSelector, self).__init__() + self._epoll = select.epoll() + + def fileno(self): + return self._epoll.fileno() + + def register(self, fileobj, events, data=None): + key = super(EpollSelector, self).register(fileobj, events, data) + epoll_events = 0 + if events & EVENT_READ: + epoll_events |= select.EPOLLIN + if events & EVENT_WRITE: + epoll_events |= select.EPOLLOUT + self._epoll.register(key.fd, epoll_events) + return key + + def unregister(self, fileobj): + key = super(EpollSelector, self).unregister(fileobj) + try: + self._epoll.unregister(key.fd) + except OSError: + # This can happen if the FD was closed since it + # was registered. + pass + return key + + def select(self, timeout=None): + if timeout is None: + timeout = -1 + elif timeout <= 0: + timeout = 0 + else: + # epoll_wait() has a resolution of 1 millisecond, round away + # from zero to wait *at least* timeout seconds. + timeout = math.ceil(timeout * 1e3) * 1e-3 + max_ev = len(self._fd_to_key) + ready = [] + try: + fd_event_list = wrap_error(self._epoll.poll, timeout, max_ev) + except InterruptedError: + return ready + for fd, event in fd_event_list: + events = 0 + if event & ~select.EPOLLIN: + events |= EVENT_WRITE + if event & ~select.EPOLLOUT: + events |= EVENT_READ + + key = self._key_from_fd(fd) + if key: + ready.append((key, events & key.events)) + return ready + + def close(self): + self._epoll.close() + super(EpollSelector, self).close() + + +if hasattr(select, 'devpoll'): + + class DevpollSelector(_BaseSelectorImpl): + """Solaris /dev/poll selector.""" + + def __init__(self): + super(DevpollSelector, self).__init__() + self._devpoll = select.devpoll() + + def fileno(self): + return self._devpoll.fileno() + + def register(self, fileobj, events, data=None): + key = super(DevpollSelector, self).register(fileobj, events, data) + poll_events = 0 + if events & EVENT_READ: + poll_events |= select.POLLIN + if events & EVENT_WRITE: + poll_events |= select.POLLOUT + self._devpoll.register(key.fd, poll_events) + return key + + def unregister(self, fileobj): + key = super(DevpollSelector, self).unregister(fileobj) + self._devpoll.unregister(key.fd) + return key + + def select(self, timeout=None): + if timeout is None: + timeout = None + elif timeout <= 0: + timeout = 0 + else: + # devpoll() has a resolution of 1 millisecond, round away from + # zero to wait *at least* timeout seconds. 
+ timeout = math.ceil(timeout * 1e3) + ready = [] + try: + fd_event_list = self._devpoll.poll(timeout) + except InterruptedError: + return ready + for fd, event in fd_event_list: + events = 0 + if event & ~select.POLLIN: + events |= EVENT_WRITE + if event & ~select.POLLOUT: + events |= EVENT_READ + + key = self._key_from_fd(fd) + if key: + ready.append((key, events & key.events)) + return ready + + def close(self): + self._devpoll.close() + super(DevpollSelector, self).close() + + +if hasattr(select, 'kqueue'): + + class KqueueSelector(_BaseSelectorImpl): + """Kqueue-based selector.""" + + def __init__(self): + super(KqueueSelector, self).__init__() + self._kqueue = select.kqueue() + + def fileno(self): + return self._kqueue.fileno() + + def register(self, fileobj, events, data=None): + key = super(KqueueSelector, self).register(fileobj, events, data) + if events & EVENT_READ: + kev = select.kevent(key.fd, select.KQ_FILTER_READ, + select.KQ_EV_ADD) + self._kqueue.control([kev], 0, 0) + if events & EVENT_WRITE: + kev = select.kevent(key.fd, select.KQ_FILTER_WRITE, + select.KQ_EV_ADD) + self._kqueue.control([kev], 0, 0) + return key + + def unregister(self, fileobj): + key = super(KqueueSelector, self).unregister(fileobj) + if key.events & EVENT_READ: + kev = select.kevent(key.fd, select.KQ_FILTER_READ, + select.KQ_EV_DELETE) + try: + self._kqueue.control([kev], 0, 0) + except OSError: + # This can happen if the FD was closed since it + # was registered. + pass + if key.events & EVENT_WRITE: + kev = select.kevent(key.fd, select.KQ_FILTER_WRITE, + select.KQ_EV_DELETE) + try: + self._kqueue.control([kev], 0, 0) + except OSError: + # See comment above. + pass + return key + + def select(self, timeout=None): + timeout = None if timeout is None else max(timeout, 0) + max_ev = len(self._fd_to_key) + ready = [] + try: + kev_list = wrap_error(self._kqueue.control, + None, max_ev, timeout) + except InterruptedError: + return ready + for kev in kev_list: + fd = kev.ident + flag = kev.filter + events = 0 + if flag == select.KQ_FILTER_READ: + events |= EVENT_READ + if flag == select.KQ_FILTER_WRITE: + events |= EVENT_WRITE + + key = self._key_from_fd(fd) + if key: + ready.append((key, events & key.events)) + return ready + + def close(self): + self._kqueue.close() + super(KqueueSelector, self).close() + + +# Choose the best implementation: roughly, epoll|kqueue|devpoll > poll > select. 
+# select() also can't accept a FD > FD_SETSIZE (usually around 1024) +if 'KqueueSelector' in globals(): + DefaultSelector = KqueueSelector +elif 'EpollSelector' in globals(): + DefaultSelector = EpollSelector +elif 'DevpollSelector' in globals(): + DefaultSelector = DevpollSelector +elif 'PollSelector' in globals(): + DefaultSelector = PollSelector +else: + DefaultSelector = SelectSelector diff --git a/py3/lib/python3.6/site-packages/gunicorn/six.py b/py3/lib/python3.6/site-packages/gunicorn/six.py new file mode 100644 index 0000000..21b0e80 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/six.py @@ -0,0 +1,762 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2014 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.8.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + # This is a bit ugly, but it avoids running this again. 
+ delattr(obj.__class__, self.name) + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. 
+ + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", 
"tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), + MovedModule("winreg", "_winreg"), +] +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." + attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + 
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + 
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) +else: + def iterkeys(d, **kw): + return iter(d.iterkeys(**kw)) + + def 
itervalues(d, **kw): + return iter(d.itervalues(**kw)) + + def iteritems(d, **kw): + return iter(d.iteritems(**kw)) + + def iterlists(d, **kw): + return iter(d.iterlists(**kw)) + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + def u(s): + return s + unichr = chr + if sys.version_info[1] <= 1: + def int2byte(i): + return bytes((i,)) + else: + # This is about 2x faster than the implementation above on 3.2+ + int2byte = operator.methodcaller("to_bytes", 1, "big") + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO +else: + def b(s): + return s + # Workaround for standalone backslash + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + def byte2int(bs): + return ord(bs[0]) + def indexbytes(buf, i): + return ord(buf[i]) + def iterbytes(buf): + return (ord(byte) for byte in buf) + import StringIO + StringIO = BytesIO = StringIO.StringIO +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. 
+sys.meta_path.append(_importer) diff --git a/py3/lib/python3.6/site-packages/gunicorn/sock.py b/py3/lib/python3.6/site-packages/gunicorn/sock.py new file mode 100644 index 0000000..8870936 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/sock.py @@ -0,0 +1,209 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import errno +import os +import socket +import stat +import sys +import time + +from gunicorn import util +from gunicorn.six import string_types + + +class BaseSocket(object): + + def __init__(self, address, conf, log, fd=None): + self.log = log + self.conf = conf + + self.cfg_addr = address + if fd is None: + sock = socket.socket(self.FAMILY, socket.SOCK_STREAM) + bound = False + else: + sock = socket.fromfd(fd, self.FAMILY, socket.SOCK_STREAM) + os.close(fd) + bound = True + + self.sock = self.set_options(sock, bound=bound) + + def __str__(self): + return "" % self.sock.fileno() + + def __getattr__(self, name): + return getattr(self.sock, name) + + def set_options(self, sock, bound=False): + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + if (self.conf.reuse_port + and hasattr(socket, 'SO_REUSEPORT')): # pragma: no cover + try: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + except socket.error as err: + if err.errno not in (errno.ENOPROTOOPT, errno.EINVAL): + raise + if not bound: + self.bind(sock) + sock.setblocking(0) + + # make sure that the socket can be inherited + if hasattr(sock, "set_inheritable"): + sock.set_inheritable(True) + + sock.listen(self.conf.backlog) + return sock + + def bind(self, sock): + sock.bind(self.cfg_addr) + + def close(self): + if self.sock is None: + return + + try: + self.sock.close() + except socket.error as e: + self.log.info("Error while closing socket %s", str(e)) + + self.sock = None + + +class TCPSocket(BaseSocket): + + FAMILY = socket.AF_INET + + def __str__(self): + if self.conf.is_ssl: + scheme = "https" + else: + scheme = "http" + + addr = self.sock.getsockname() + return "%s://%s:%d" % (scheme, addr[0], addr[1]) + + def set_options(self, sock, bound=False): + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return super(TCPSocket, self).set_options(sock, bound=bound) + + +class TCP6Socket(TCPSocket): + + FAMILY = socket.AF_INET6 + + def __str__(self): + (host, port, _, _) = self.sock.getsockname() + return "http://[%s]:%d" % (host, port) + + +class UnixSocket(BaseSocket): + + FAMILY = socket.AF_UNIX + + def __init__(self, addr, conf, log, fd=None): + if fd is None: + try: + st = os.stat(addr) + except OSError as e: + if e.args[0] != errno.ENOENT: + raise + else: + if stat.S_ISSOCK(st.st_mode): + os.remove(addr) + else: + raise ValueError("%r is not a socket" % addr) + super(UnixSocket, self).__init__(addr, conf, log, fd=fd) + + def __str__(self): + return "unix:%s" % self.cfg_addr + + def bind(self, sock): + old_umask = os.umask(self.conf.umask) + sock.bind(self.cfg_addr) + util.chown(self.cfg_addr, self.conf.uid, self.conf.gid) + os.umask(old_umask) + + +def _sock_type(addr): + if isinstance(addr, tuple): + if util.is_ipv6(addr[0]): + sock_type = TCP6Socket + else: + sock_type = TCPSocket + elif isinstance(addr, string_types): + sock_type = UnixSocket + else: + raise TypeError("Unable to create socket from: %r" % addr) + return sock_type + + +def create_sockets(conf, log, fds=None): + """ + Create a new socket for the configured addresses or file descriptors. 
+ + If a configured address is a tuple then a TCP socket is created. + If it is a string, a Unix socket is created. Otherwise, a TypeError is + raised. + """ + listeners = [] + + # get it only once + laddr = conf.address + + # check ssl config early to raise the error on startup + # only the certfile is needed since it can contains the keyfile + if conf.certfile and not os.path.exists(conf.certfile): + raise ValueError('certfile "%s" does not exist' % conf.certfile) + + if conf.keyfile and not os.path.exists(conf.keyfile): + raise ValueError('keyfile "%s" does not exist' % conf.keyfile) + + # sockets are already bound + if fds is not None: + for fd in fds: + sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM) + sock_name = sock.getsockname() + sock_type = _sock_type(sock_name) + listener = sock_type(sock_name, conf, log, fd=fd) + listeners.append(listener) + + return listeners + + # no sockets is bound, first initialization of gunicorn in this env. + for addr in laddr: + sock_type = _sock_type(addr) + sock = None + for i in range(5): + try: + sock = sock_type(addr, conf, log) + except socket.error as e: + if e.args[0] == errno.EADDRINUSE: + log.error("Connection in use: %s", str(addr)) + if e.args[0] == errno.EADDRNOTAVAIL: + log.error("Invalid address: %s", str(addr)) + if i < 5: + msg = "connection to {addr} failed: {error}" + log.debug(msg.format(addr=str(addr), error=str(e))) + log.error("Retrying in 1 second.") + time.sleep(1) + else: + break + + if sock is None: + log.error("Can't connect to %s", str(addr)) + sys.exit(1) + + listeners.append(sock) + + return listeners + + +def close_sockets(listeners, unlink=True): + for sock in listeners: + sock_name = sock.getsockname() + sock.close() + if unlink and _sock_type(sock_name) is UnixSocket: + os.unlink(sock_name) diff --git a/py3/lib/python3.6/site-packages/gunicorn/systemd.py b/py3/lib/python3.6/site-packages/gunicorn/systemd.py new file mode 100644 index 0000000..10ffb8d --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/systemd.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import os + +SD_LISTEN_FDS_START = 3 + + +def listen_fds(unset_environment=True): + """ + Get the number of sockets inherited from systemd socket activation. + + :param unset_environment: clear systemd environment variables unless False + :type unset_environment: bool + :return: the number of sockets to inherit from systemd socket activation + :rtype: int + + Returns zero immediately if $LISTEN_PID is not set to the current pid. + Otherwise, returns the number of systemd activation sockets specified by + $LISTEN_FDS. + + When $LISTEN_PID matches the current pid, unsets the environment variables + unless the ``unset_environment`` flag is ``False``. + + .. note:: + Unlike the sd_listen_fds C function, this implementation does not set + the FD_CLOEXEC flag because the gunicorn arbiter never needs to do this. + + .. 
seealso:: + ``_ + + """ + fds = int(os.environ.get('LISTEN_FDS', 0)) + listen_pid = int(os.environ.get('LISTEN_PID', 0)) + + if listen_pid != os.getpid(): + return 0 + + if unset_environment: + os.environ.pop('LISTEN_PID', None) + os.environ.pop('LISTEN_FDS', None) + + return fds diff --git a/py3/lib/python3.6/site-packages/gunicorn/util.py b/py3/lib/python3.6/site-packages/gunicorn/util.py new file mode 100644 index 0000000..84f6937 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/util.py @@ -0,0 +1,557 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +from __future__ import print_function + +import email.utils +import fcntl +import io +import os +import pkg_resources +import pwd +import random +import socket +import sys +import textwrap +import time +import traceback +import inspect +import errno +import warnings +import logging +import re + +from gunicorn import _compat +from gunicorn.errors import AppImportError +from gunicorn.six import text_type +from gunicorn.workers import SUPPORTED_WORKERS + +REDIRECT_TO = getattr(os, 'devnull', '/dev/null') + +# Server and Date aren't technically hop-by-hop +# headers, but they are in the purview of the +# origin server which the WSGI spec says we should +# act like. So we drop them and add our own. +# +# In the future, concatenation server header values +# might be better, but nothing else does it and +# dropping them is easier. +hop_headers = set(""" + connection keep-alive proxy-authenticate proxy-authorization + te trailers transfer-encoding upgrade + server date + """.split()) + +try: + from setproctitle import setproctitle + + def _setproctitle(title): + setproctitle("gunicorn: %s" % title) +except ImportError: + def _setproctitle(title): + return + + +try: + from importlib import import_module +except ImportError: + def _resolve_name(name, package, level): + """Return the absolute name of the module to be imported.""" + if not hasattr(package, 'rindex'): + raise ValueError("'package' not set to a string") + dot = len(package) + for _ in range(level, 1, -1): + try: + dot = package.rindex('.', 0, dot) + except ValueError: + msg = "attempted relative import beyond top-level package" + raise ValueError(msg) + return "%s.%s" % (package[:dot], name) + + def import_module(name, package=None): + """Import a module. + +The 'package' argument is required when performing a relative import. It +specifies the package to use as the anchor point from which to resolve the +relative import to an absolute import. 
+ +""" + if name.startswith('.'): + if not package: + raise TypeError("relative imports require the 'package' argument") + level = 0 + for character in name: + if character != '.': + break + level += 1 + name = _resolve_name(name[level:], package, level) + __import__(name) + return sys.modules[name] + + +def load_class(uri, default="gunicorn.workers.sync.SyncWorker", + section="gunicorn.workers"): + if inspect.isclass(uri): + return uri + if uri.startswith("egg:"): + # uses entry points + entry_str = uri.split("egg:")[1] + try: + dist, name = entry_str.rsplit("#", 1) + except ValueError: + dist = entry_str + name = default + + try: + return pkg_resources.load_entry_point(dist, section, name) + except: + exc = traceback.format_exc() + msg = "class uri %r invalid or not found: \n\n[%s]" + raise RuntimeError(msg % (uri, exc)) + else: + components = uri.split('.') + if len(components) == 1: + while True: + if uri.startswith("#"): + uri = uri[1:] + + if uri in SUPPORTED_WORKERS: + components = SUPPORTED_WORKERS[uri].split(".") + break + + try: + return pkg_resources.load_entry_point("gunicorn", + section, uri) + except: + exc = traceback.format_exc() + msg = "class uri %r invalid or not found: \n\n[%s]" + raise RuntimeError(msg % (uri, exc)) + + klass = components.pop(-1) + + try: + mod = import_module('.'.join(components)) + except: + exc = traceback.format_exc() + msg = "class uri %r invalid or not found: \n\n[%s]" + raise RuntimeError(msg % (uri, exc)) + return getattr(mod, klass) + + +def get_username(uid): + """ get the username for a user id""" + return pwd.getpwuid(uid).pw_name + + +def set_owner_process(uid, gid, initgroups=False): + """ set user and group of workers processes """ + + if gid: + if uid: + try: + username = get_username(uid) + except KeyError: + initgroups = False + + # versions of python < 2.6.2 don't manage unsigned int for + # groups like on osx or fedora + gid = abs(gid) & 0x7FFFFFFF + + if initgroups: + os.initgroups(username, gid) + elif gid != os.getgid(): + os.setgid(gid) + + if uid: + os.setuid(uid) + + +def chown(path, uid, gid): + gid = abs(gid) & 0x7FFFFFFF # see note above. + os.chown(path, uid, gid) + + +if sys.platform.startswith("win"): + def _waitfor(func, pathname, waitall=False): + # Peform the operation + func(pathname) + # Now setup the wait loop + if waitall: + dirname = pathname + else: + dirname, name = os.path.split(pathname) + dirname = dirname or '.' + # Check for `pathname` to be removed from the filesystem. + # The exponential backoff of the timeout amounts to a total + # of ~1 second after which the deletion is probably an error + # anyway. + # Testing on a i7@4.3GHz shows that usually only 1 iteration is + # required when contention occurs. + timeout = 0.001 + while timeout < 1.0: + # Note we are only testing for the existence of the file(s) in + # the contents of the directory regardless of any security or + # access rights. If we have made it this far, we have sufficient + # permissions to do that much using Python's equivalent of the + # Windows API FindFirstFile. + # Other Windows APIs can fail or give incorrect results when + # dealing with files that are pending deletion. 
+ L = os.listdir(dirname) + if not L if waitall else name in L: + return + # Increase the timeout and try again + time.sleep(timeout) + timeout *= 2 + warnings.warn('tests may fail, delete still pending for ' + pathname, + RuntimeWarning, stacklevel=4) + + def _unlink(filename): + _waitfor(os.unlink, filename) +else: + _unlink = os.unlink + + +def unlink(filename): + try: + _unlink(filename) + except OSError as error: + # The filename need not exist. + if error.errno not in (errno.ENOENT, errno.ENOTDIR): + raise + + +def is_ipv6(addr): + try: + socket.inet_pton(socket.AF_INET6, addr) + except socket.error: # not a valid address + return False + except ValueError: # ipv6 not supported on this platform + return False + return True + + +def parse_address(netloc, default_port=8000): + if re.match(r'unix:(//)?', netloc): + return re.split(r'unix:(//)?', netloc)[-1] + + if netloc.startswith("tcp://"): + netloc = netloc.split("tcp://")[1] + + # get host + if '[' in netloc and ']' in netloc: + host = netloc.split(']')[0][1:].lower() + elif ':' in netloc: + host = netloc.split(':')[0].lower() + elif netloc == "": + host = "0.0.0.0" + else: + host = netloc.lower() + + #get port + netloc = netloc.split(']')[-1] + if ":" in netloc: + port = netloc.split(':', 1)[1] + if not port.isdigit(): + raise RuntimeError("%r is not a valid port number." % port) + port = int(port) + else: + port = default_port + return (host, port) + + +def close_on_exec(fd): + flags = fcntl.fcntl(fd, fcntl.F_GETFD) + flags |= fcntl.FD_CLOEXEC + fcntl.fcntl(fd, fcntl.F_SETFD, flags) + + +def set_non_blocking(fd): + flags = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK + fcntl.fcntl(fd, fcntl.F_SETFL, flags) + + +def close(sock): + try: + sock.close() + except socket.error: + pass + +try: + from os import closerange +except ImportError: + def closerange(fd_low, fd_high): + # Iterate through and close all file descriptors. + for fd in range(fd_low, fd_high): + try: + os.close(fd) + except OSError: # ERROR, fd wasn't open to begin with (ignored) + pass + + +def write_chunk(sock, data): + if isinstance(data, text_type): + data = data.encode('utf-8') + chunk_size = "%X\r\n" % len(data) + chunk = b"".join([chunk_size.encode('utf-8'), data, b"\r\n"]) + sock.sendall(chunk) + + +def write(sock, data, chunked=False): + if chunked: + return write_chunk(sock, data) + sock.sendall(data) + + +def write_nonblock(sock, data, chunked=False): + timeout = sock.gettimeout() + if timeout != 0.0: + try: + sock.setblocking(0) + return write(sock, data, chunked) + finally: + sock.setblocking(1) + else: + return write(sock, data, chunked) + + +def write_error(sock, status_int, reason, mesg): + html = textwrap.dedent("""\ + + + %(reason)s + + +

+        <h1><p>%(reason)s</p></h1>
+ %(mesg)s + + + """) % {"reason": reason, "mesg": _compat.html_escape(mesg)} + + http = textwrap.dedent("""\ + HTTP/1.1 %s %s\r + Connection: close\r + Content-Type: text/html\r + Content-Length: %d\r + \r + %s""") % (str(status_int), reason, len(html), html) + write_nonblock(sock, http.encode('latin1')) + + +def import_app(module): + parts = module.split(":", 1) + if len(parts) == 1: + module, obj = module, "application" + else: + module, obj = parts[0], parts[1] + + try: + __import__(module) + except ImportError: + if module.endswith(".py") and os.path.exists(module): + msg = "Failed to find application, did you mean '%s:%s'?" + raise ImportError(msg % (module.rsplit(".", 1)[0], obj)) + else: + raise + + mod = sys.modules[module] + + is_debug = logging.root.level == logging.DEBUG + try: + app = eval(obj, vars(mod)) + except NameError: + if is_debug: + traceback.print_exception(*sys.exc_info()) + raise AppImportError("Failed to find application object %r in %r" % (obj, module)) + + if app is None: + raise AppImportError("Failed to find application object: %r" % obj) + + if not callable(app): + raise AppImportError("Application object must be callable.") + return app + + +def getcwd(): + # get current path, try to use PWD env first + try: + a = os.stat(os.environ['PWD']) + b = os.stat(os.getcwd()) + if a.st_ino == b.st_ino and a.st_dev == b.st_dev: + cwd = os.environ['PWD'] + else: + cwd = os.getcwd() + except: + cwd = os.getcwd() + return cwd + + +def http_date(timestamp=None): + """Return the current date and time formatted for a message header.""" + if timestamp is None: + timestamp = time.time() + s = email.utils.formatdate(timestamp, localtime=False, usegmt=True) + return s + + +def is_hoppish(header): + return header.lower().strip() in hop_headers + + +def daemonize(enable_stdio_inheritance=False): + """\ + Standard daemonization of a process. + http://www.svbug.com/documentation/comp.unix.programmer-FAQ/faq_2.html#SEC16 + """ + if 'GUNICORN_FD' not in os.environ: + if os.fork(): + os._exit(0) + os.setsid() + + if os.fork(): + os._exit(0) + + os.umask(0o22) + + # In both the following any file descriptors above stdin + # stdout and stderr are left untouched. The inheritance + # option simply allows one to have output go to a file + # specified by way of shell redirection when not wanting + # to use --error-log option. + + if not enable_stdio_inheritance: + # Remap all of stdin, stdout and stderr on to + # /dev/null. The expectation is that users have + # specified the --error-log option. + + closerange(0, 3) + + fd_null = os.open(REDIRECT_TO, os.O_RDWR) + + if fd_null != 0: + os.dup2(fd_null, 0) + + os.dup2(fd_null, 1) + os.dup2(fd_null, 2) + + else: + fd_null = os.open(REDIRECT_TO, os.O_RDWR) + + # Always redirect stdin to /dev/null as we would + # never expect to need to read interactive input. + + if fd_null != 0: + os.close(0) + os.dup2(fd_null, 0) + + # If stdout and stderr are still connected to + # their original file descriptors we check to see + # if they are associated with terminal devices. + # When they are we map them to /dev/null so that + # are still detached from any controlling terminal + # properly. If not we preserve them as they are. + # + # If stdin and stdout were not hooked up to the + # original file descriptors, then all bets are + # off and all we can really do is leave them as + # they were. + # + # This will allow 'gunicorn ... > output.log 2>&1' + # to work with stdout/stderr going to the file + # as expected. 
+ # + # Note that if using --error-log option, the log + # file specified through shell redirection will + # only be used up until the log file specified + # by the option takes over. As it replaces stdout + # and stderr at the file descriptor level, then + # anything using stdout or stderr, including having + # cached a reference to them, will still work. + + def redirect(stream, fd_expect): + try: + fd = stream.fileno() + if fd == fd_expect and stream.isatty(): + os.close(fd) + os.dup2(fd_null, fd) + except AttributeError: + pass + + redirect(sys.stdout, 1) + redirect(sys.stderr, 2) + + +def seed(): + try: + random.seed(os.urandom(64)) + except NotImplementedError: + random.seed('%s.%s' % (time.time(), os.getpid())) + + +def check_is_writeable(path): + try: + f = open(path, 'a') + except IOError as e: + raise RuntimeError("Error: '%s' isn't writable [%r]" % (path, e)) + f.close() + + +def to_bytestring(value, encoding="utf8"): + """Converts a string argument to a byte string""" + if isinstance(value, bytes): + return value + if not isinstance(value, text_type): + raise TypeError('%r is not a string' % value) + + return value.encode(encoding) + +def has_fileno(obj): + if not hasattr(obj, "fileno"): + return False + + # check BytesIO case and maybe others + try: + obj.fileno() + except (AttributeError, IOError, io.UnsupportedOperation): + return False + + return True + + +def warn(msg): + print("!!!", file=sys.stderr) + + lines = msg.splitlines() + for i, line in enumerate(lines): + if i == 0: + line = "WARNING: %s" % line + print("!!! %s" % line, file=sys.stderr) + + print("!!!\n", file=sys.stderr) + sys.stderr.flush() + + +def make_fail_app(msg): + msg = to_bytestring(msg) + + def app(environ, start_response): + start_response("500 Internal Server Error", [ + ("Content-Type", "text/plain"), + ("Content-Length", str(len(msg))) + ]) + return [msg] + + return app + + +def split_request_uri(uri): + if uri.startswith("//"): + # When the path starts with //, urlsplit considers it as a + # relative uri while the RFC says we should consider it as abs_path + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2 + # We use temporary dot prefix to workaround this behaviour + parts = _compat.urlsplit("." + uri) + return parts._replace(path=parts.path[1:]) + + return _compat.urlsplit(uri) diff --git a/py3/lib/python3.6/site-packages/gunicorn/workers/__init__.py b/py3/lib/python3.6/site-packages/gunicorn/workers/__init__.py new file mode 100644 index 0000000..074e001 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/workers/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import sys + +# supported gunicorn workers. +SUPPORTED_WORKERS = { + "sync": "gunicorn.workers.sync.SyncWorker", + "eventlet": "gunicorn.workers.geventlet.EventletWorker", + "gevent": "gunicorn.workers.ggevent.GeventWorker", + "gevent_wsgi": "gunicorn.workers.ggevent.GeventPyWSGIWorker", + "gevent_pywsgi": "gunicorn.workers.ggevent.GeventPyWSGIWorker", + "tornado": "gunicorn.workers.gtornado.TornadoWorker", + "gthread": "gunicorn.workers.gthread.ThreadWorker", +} + + +if sys.version_info >= (3, 4): + # gaiohttp worker can be used with Python 3.4+ only. 
+ SUPPORTED_WORKERS["gaiohttp"] = "gunicorn.workers.gaiohttp.AiohttpWorker" diff --git a/py3/lib/python3.6/site-packages/gunicorn/workers/_gaiohttp.py b/py3/lib/python3.6/site-packages/gunicorn/workers/_gaiohttp.py new file mode 100644 index 0000000..fe378c3 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/workers/_gaiohttp.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import asyncio +import datetime +import functools +import logging +import os + +try: + import ssl +except ImportError: + ssl = None + +import gunicorn.workers.base as base + +from aiohttp.wsgi import WSGIServerHttpProtocol as OldWSGIServerHttpProtocol + + +class WSGIServerHttpProtocol(OldWSGIServerHttpProtocol): + def log_access(self, request, environ, response, time): + self.logger.access(response, request, environ, datetime.timedelta(0, 0, time)) + + +class AiohttpWorker(base.Worker): + + def __init__(self, *args, **kw): # pragma: no cover + super().__init__(*args, **kw) + cfg = self.cfg + if cfg.is_ssl: + self.ssl_context = self._create_ssl_context(cfg) + else: + self.ssl_context = None + self.servers = [] + self.connections = {} + + def init_process(self): + # create new event_loop after fork + asyncio.get_event_loop().close() + + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + super().init_process() + + def run(self): + self._runner = asyncio.ensure_future(self._run(), loop=self.loop) + + try: + self.loop.run_until_complete(self._runner) + finally: + self.loop.close() + + def wrap_protocol(self, proto): + proto.connection_made = _wrp( + proto, proto.connection_made, self.connections) + proto.connection_lost = _wrp( + proto, proto.connection_lost, self.connections, False) + return proto + + def factory(self, wsgi, addr): + # are we in debug level + is_debug = self.log.loglevel == logging.DEBUG + + proto = WSGIServerHttpProtocol( + wsgi, readpayload=True, + loop=self.loop, + log=self.log, + debug=is_debug, + keep_alive=self.cfg.keepalive, + access_log=self.log.access_log, + access_log_format=self.cfg.access_log_format) + return self.wrap_protocol(proto) + + def get_factory(self, sock, addr): + return functools.partial(self.factory, self.wsgi, addr) + + @asyncio.coroutine + def close(self): + try: + if hasattr(self.wsgi, 'close'): + yield from self.wsgi.close() + except: + self.log.exception('Process shutdown exception') + + @asyncio.coroutine + def _run(self): + for sock in self.sockets: + factory = self.get_factory(sock.sock, sock.cfg_addr) + self.servers.append( + (yield from self._create_server(factory, sock))) + + # If our parent changed then we shut down. 
+ pid = os.getpid() + try: + while self.alive or self.connections: + self.notify() + + if (self.alive and + pid == os.getpid() and self.ppid != os.getppid()): + self.log.info("Parent changed, shutting down: %s", self) + self.alive = False + + # stop accepting requests + if not self.alive: + if self.servers: + self.log.info( + "Stopping server: %s, connections: %s", + pid, len(self.connections)) + for server in self.servers: + server.close() + self.servers.clear() + + # prepare connections for closing + for conn in self.connections.values(): + if hasattr(conn, 'closing'): + conn.closing() + + yield from asyncio.sleep(1.0, loop=self.loop) + except KeyboardInterrupt: + pass + + if self.servers: + for server in self.servers: + server.close() + + yield from self.close() + + @asyncio.coroutine + def _create_server(self, factory, sock): + return self.loop.create_server(factory, sock=sock.sock, + ssl=self.ssl_context) + + @staticmethod + def _create_ssl_context(cfg): + """ Creates SSLContext instance for usage in asyncio.create_server. + + See ssl.SSLSocket.__init__ for more details. + """ + ctx = ssl.SSLContext(cfg.ssl_version) + ctx.load_cert_chain(cfg.certfile, cfg.keyfile) + ctx.verify_mode = cfg.cert_reqs + if cfg.ca_certs: + ctx.load_verify_locations(cfg.ca_certs) + if cfg.ciphers: + ctx.set_ciphers(cfg.ciphers) + return ctx + + +class _wrp: + + def __init__(self, proto, meth, tracking, add=True): + self._proto = proto + self._id = id(proto) + self._meth = meth + self._tracking = tracking + self._add = add + + def __call__(self, *args): + if self._add: + self._tracking[self._id] = self._proto + elif self._id in self._tracking: + del self._tracking[self._id] + + conn = self._meth(*args) + return conn diff --git a/py3/lib/python3.6/site-packages/gunicorn/workers/base.py b/py3/lib/python3.6/site-packages/gunicorn/workers/base.py new file mode 100644 index 0000000..881efa0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/workers/base.py @@ -0,0 +1,264 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +from datetime import datetime +import os +from random import randint +import signal +from ssl import SSLError +import sys +import time +import traceback + +from gunicorn import six +from gunicorn import util +from gunicorn.workers.workertmp import WorkerTmp +from gunicorn.reloader import reloader_engines +from gunicorn.http.errors import ( + InvalidHeader, InvalidHeaderName, InvalidRequestLine, InvalidRequestMethod, + InvalidHTTPVersion, LimitRequestLine, LimitRequestHeaders, +) +from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest +from gunicorn.http.errors import InvalidSchemeHeaders +from gunicorn.http.wsgi import default_environ, Response +from gunicorn.six import MAXSIZE + + +class Worker(object): + + SIGNALS = [getattr(signal, "SIG%s" % x) + for x in "ABRT HUP QUIT INT TERM USR1 USR2 WINCH CHLD".split()] + + PIPE = [] + + def __init__(self, age, ppid, sockets, app, timeout, cfg, log): + """\ + This is called pre-fork so it shouldn't do anything to the + current process. If there's a need to make process wide + changes you'll want to do that in ``self.init_process()``. 
+ """ + self.age = age + self.pid = "[booting]" + self.ppid = ppid + self.sockets = sockets + self.app = app + self.timeout = timeout + self.cfg = cfg + self.booted = False + self.aborted = False + self.reloader = None + + self.nr = 0 + jitter = randint(0, cfg.max_requests_jitter) + self.max_requests = cfg.max_requests + jitter or MAXSIZE + self.alive = True + self.log = log + self.tmp = WorkerTmp(cfg) + + def __str__(self): + return "" % self.pid + + def notify(self): + """\ + Your worker subclass must arrange to have this method called + once every ``self.timeout`` seconds. If you fail in accomplishing + this task, the master process will murder your workers. + """ + self.tmp.notify() + + def run(self): + """\ + This is the mainloop of a worker process. You should override + this method in a subclass to provide the intended behaviour + for your particular evil schemes. + """ + raise NotImplementedError() + + def init_process(self): + """\ + If you override this method in a subclass, the last statement + in the function should be to call this method with + super(MyWorkerClass, self).init_process() so that the ``run()`` + loop is initiated. + """ + + # set environment' variables + if self.cfg.env: + for k, v in self.cfg.env.items(): + os.environ[k] = v + + util.set_owner_process(self.cfg.uid, self.cfg.gid, + initgroups=self.cfg.initgroups) + + # Reseed the random number generator + util.seed() + + # For waking ourselves up + self.PIPE = os.pipe() + for p in self.PIPE: + util.set_non_blocking(p) + util.close_on_exec(p) + + # Prevent fd inheritance + for s in self.sockets: + util.close_on_exec(s) + util.close_on_exec(self.tmp.fileno()) + + self.wait_fds = self.sockets + [self.PIPE[0]] + + self.log.close_on_exec() + + self.init_signals() + + # start the reloader + if self.cfg.reload: + def changed(fname): + self.log.info("Worker reloading: %s modified", fname) + self.alive = False + self.cfg.worker_int(self) + time.sleep(0.1) + sys.exit(0) + + reloader_cls = reloader_engines[self.cfg.reload_engine] + self.reloader = reloader_cls(extra_files=self.cfg.reload_extra_files, + callback=changed) + self.reloader.start() + + self.load_wsgi() + self.cfg.post_worker_init(self) + + # Enter main run loop + self.booted = True + self.run() + + def load_wsgi(self): + try: + self.wsgi = self.app.wsgi() + except SyntaxError as e: + if not self.cfg.reload: + raise + + self.log.exception(e) + + # fix from PR #1228 + # storing the traceback into exc_tb will create a circular reference. + # per https://docs.python.org/2/library/sys.html#sys.exc_info warning, + # delete the traceback after use. 
+ try: + _, exc_val, exc_tb = sys.exc_info() + self.reloader.add_extra_file(exc_val.filename) + + tb_string = six.StringIO() + traceback.print_tb(exc_tb, file=tb_string) + self.wsgi = util.make_fail_app(tb_string.getvalue()) + finally: + del exc_tb + + def init_signals(self): + # reset signaling + for s in self.SIGNALS: + signal.signal(s, signal.SIG_DFL) + # init new signaling + signal.signal(signal.SIGQUIT, self.handle_quit) + signal.signal(signal.SIGTERM, self.handle_exit) + signal.signal(signal.SIGINT, self.handle_quit) + signal.signal(signal.SIGWINCH, self.handle_winch) + signal.signal(signal.SIGUSR1, self.handle_usr1) + signal.signal(signal.SIGABRT, self.handle_abort) + + # Don't let SIGTERM and SIGUSR1 disturb active requests + # by interrupting system calls + if hasattr(signal, 'siginterrupt'): # python >= 2.6 + signal.siginterrupt(signal.SIGTERM, False) + signal.siginterrupt(signal.SIGUSR1, False) + + if hasattr(signal, 'set_wakeup_fd'): + signal.set_wakeup_fd(self.PIPE[1]) + + def handle_usr1(self, sig, frame): + self.log.reopen_files() + + def handle_exit(self, sig, frame): + self.alive = False + + def handle_quit(self, sig, frame): + self.alive = False + # worker_int callback + self.cfg.worker_int(self) + time.sleep(0.1) + sys.exit(0) + + def handle_abort(self, sig, frame): + self.alive = False + self.cfg.worker_abort(self) + sys.exit(1) + + def handle_error(self, req, client, addr, exc): + request_start = datetime.now() + addr = addr or ('', -1) # unix socket case + if isinstance(exc, (InvalidRequestLine, InvalidRequestMethod, + InvalidHTTPVersion, InvalidHeader, InvalidHeaderName, + LimitRequestLine, LimitRequestHeaders, + InvalidProxyLine, ForbiddenProxyRequest, + InvalidSchemeHeaders, + SSLError)): + + status_int = 400 + reason = "Bad Request" + + if isinstance(exc, InvalidRequestLine): + mesg = "Invalid Request Line '%s'" % str(exc) + elif isinstance(exc, InvalidRequestMethod): + mesg = "Invalid Method '%s'" % str(exc) + elif isinstance(exc, InvalidHTTPVersion): + mesg = "Invalid HTTP Version '%s'" % str(exc) + elif isinstance(exc, (InvalidHeaderName, InvalidHeader,)): + mesg = "%s" % str(exc) + if not req and hasattr(exc, "req"): + req = exc.req # for access log + elif isinstance(exc, LimitRequestLine): + mesg = "%s" % str(exc) + elif isinstance(exc, LimitRequestHeaders): + mesg = "Error parsing headers: '%s'" % str(exc) + elif isinstance(exc, InvalidProxyLine): + mesg = "'%s'" % str(exc) + elif isinstance(exc, ForbiddenProxyRequest): + reason = "Forbidden" + mesg = "Request forbidden" + status_int = 403 + elif isinstance(exc, InvalidSchemeHeaders): + mesg = "%s" % str(exc) + elif isinstance(exc, SSLError): + reason = "Forbidden" + mesg = "'%s'" % str(exc) + status_int = 403 + + msg = "Invalid request from ip={ip}: {error}" + self.log.debug(msg.format(ip=addr[0], error=str(exc))) + else: + if hasattr(req, "uri"): + self.log.exception("Error handling request %s", req.uri) + status_int = 500 + reason = "Internal Server Error" + mesg = "" + + if req is not None: + request_time = datetime.now() - request_start + environ = default_environ(req, client, self.cfg) + environ['REMOTE_ADDR'] = addr[0] + environ['REMOTE_PORT'] = str(addr[1]) + resp = Response(req, client, self.cfg) + resp.status = "%s %s" % (status_int, reason) + resp.response_length = len(mesg) + self.log.access(resp, req, environ, request_time) + + try: + util.write_error(client, status_int, reason, mesg) + except: + self.log.debug("Failed to send error message.") + + def handle_winch(self, sig, fname): + # Ignore 
SIGWINCH in worker. Fixes a crash on OpenBSD. + self.log.debug("worker: SIGWINCH ignored.") diff --git a/py3/lib/python3.6/site-packages/gunicorn/workers/base_async.py b/py3/lib/python3.6/site-packages/gunicorn/workers/base_async.py new file mode 100644 index 0000000..a3a0f91 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/workers/base_async.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +from datetime import datetime +import errno +import socket +import ssl +import sys + +import gunicorn.http as http +import gunicorn.http.wsgi as wsgi +import gunicorn.util as util +import gunicorn.workers.base as base +from gunicorn import six + +ALREADY_HANDLED = object() + + +class AsyncWorker(base.Worker): + + def __init__(self, *args, **kwargs): + super(AsyncWorker, self).__init__(*args, **kwargs) + self.worker_connections = self.cfg.worker_connections + + def timeout_ctx(self): + raise NotImplementedError() + + def is_already_handled(self, respiter): + # some workers will need to overload this function to raise a StopIteration + return respiter == ALREADY_HANDLED + + def handle(self, listener, client, addr): + req = None + try: + parser = http.RequestParser(self.cfg, client) + try: + listener_name = listener.getsockname() + if not self.cfg.keepalive: + req = six.next(parser) + self.handle_request(listener_name, req, client, addr) + else: + # keepalive loop + proxy_protocol_info = {} + while True: + req = None + with self.timeout_ctx(): + req = six.next(parser) + if not req: + break + if req.proxy_protocol_info: + proxy_protocol_info = req.proxy_protocol_info + else: + req.proxy_protocol_info = proxy_protocol_info + self.handle_request(listener_name, req, client, addr) + except http.errors.NoMoreData as e: + self.log.debug("Ignored premature client disconnection. %s", e) + except StopIteration as e: + self.log.debug("Closing connection. 
%s", e) + except ssl.SSLError: + # pass to next try-except level + six.reraise(*sys.exc_info()) + except EnvironmentError: + # pass to next try-except level + six.reraise(*sys.exc_info()) + except Exception as e: + self.handle_error(req, client, addr, e) + except ssl.SSLError as e: + if e.args[0] == ssl.SSL_ERROR_EOF: + self.log.debug("ssl connection closed") + client.close() + else: + self.log.debug("Error processing SSL request.") + self.handle_error(req, client, addr, e) + except EnvironmentError as e: + if e.errno not in (errno.EPIPE, errno.ECONNRESET): + self.log.exception("Socket error processing request.") + else: + if e.errno == errno.ECONNRESET: + self.log.debug("Ignoring connection reset") + else: + self.log.debug("Ignoring EPIPE") + except Exception as e: + self.handle_error(req, client, addr, e) + finally: + util.close(client) + + def handle_request(self, listener_name, req, sock, addr): + request_start = datetime.now() + environ = {} + resp = None + try: + self.cfg.pre_request(self, req) + resp, environ = wsgi.create(req, sock, addr, + listener_name, self.cfg) + environ["wsgi.multithread"] = True + self.nr += 1 + if self.alive and self.nr >= self.max_requests: + self.log.info("Autorestarting worker after current request.") + resp.force_close() + self.alive = False + + if not self.cfg.keepalive: + resp.force_close() + + respiter = self.wsgi(environ, resp.start_response) + if self.is_already_handled(respiter): + return False + try: + if isinstance(respiter, environ['wsgi.file_wrapper']): + resp.write_file(respiter) + else: + for item in respiter: + resp.write(item) + resp.close() + request_time = datetime.now() - request_start + self.log.access(resp, req, environ, request_time) + finally: + if hasattr(respiter, "close"): + respiter.close() + if resp.should_close(): + raise StopIteration() + except StopIteration: + raise + except EnvironmentError: + # If the original exception was a socket.error we delegate + # handling it to the caller (where handle() might ignore it) + six.reraise(*sys.exc_info()) + except Exception: + if resp and resp.headers_sent: + # If the requests have already been sent, we should close the + # connection to indicate the error. + self.log.exception("Error handling request") + try: + sock.shutdown(socket.SHUT_RDWR) + sock.close() + except EnvironmentError: + pass + raise StopIteration() + raise + finally: + try: + self.cfg.post_request(self, req, environ, resp) + except Exception: + self.log.exception("Exception in post_request hook") + return True diff --git a/py3/lib/python3.6/site-packages/gunicorn/workers/gaiohttp.py b/py3/lib/python3.6/site-packages/gunicorn/workers/gaiohttp.py new file mode 100644 index 0000000..bef6b49 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/workers/gaiohttp.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import sys + +from gunicorn import util + +if sys.version_info >= (3, 4): + try: + import aiohttp # pylint: disable=unused-import + except ImportError: + raise RuntimeError("You need aiohttp installed to use this worker.") + else: + try: + from aiohttp.worker import GunicornWebWorker as AiohttpWorker + except ImportError: + from gunicorn.workers._gaiohttp import AiohttpWorker + + util.warn( + "The 'gaiohttp' worker is deprecated. See --worker-class " + "documentation for more information." 
+ ) + __all__ = ['AiohttpWorker'] +else: + raise RuntimeError("You need Python >= 3.4 to use the gaiohttp worker") diff --git a/py3/lib/python3.6/site-packages/gunicorn/workers/geventlet.py b/py3/lib/python3.6/site-packages/gunicorn/workers/geventlet.py new file mode 100644 index 0000000..189062c --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/workers/geventlet.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +from functools import partial +import errno +import sys + +try: + import eventlet +except ImportError: + raise RuntimeError("You need eventlet installed to use this worker.") + +# validate the eventlet version +if eventlet.version_info < (0, 9, 7): + raise RuntimeError("You need eventlet >= 0.9.7") + + +from eventlet import hubs, greenthread +from eventlet.greenio import GreenSocket +from eventlet.hubs import trampoline +from eventlet.wsgi import ALREADY_HANDLED as EVENTLET_ALREADY_HANDLED +import greenlet + +from gunicorn.http.wsgi import sendfile as o_sendfile +from gunicorn.workers.base_async import AsyncWorker + +def _eventlet_sendfile(fdout, fdin, offset, nbytes): + while True: + try: + return o_sendfile(fdout, fdin, offset, nbytes) + except OSError as e: + if e.args[0] == errno.EAGAIN: + trampoline(fdout, write=True) + else: + raise + + +def _eventlet_serve(sock, handle, concurrency): + """ + Serve requests forever. + + This code is nearly identical to ``eventlet.convenience.serve`` except + that it attempts to join the pool at the end, which allows for gunicorn + graceful shutdowns. + """ + pool = eventlet.greenpool.GreenPool(concurrency) + server_gt = eventlet.greenthread.getcurrent() + + while True: + try: + conn, addr = sock.accept() + gt = pool.spawn(handle, conn, addr) + gt.link(_eventlet_stop, server_gt, conn) + conn, addr, gt = None, None, None + except eventlet.StopServe: + sock.close() + pool.waitall() + return + + +def _eventlet_stop(client, server, conn): + """ + Stop a greenlet handling a request and close its connection. + + This code is lifted from eventlet so as not to depend on undocumented + functions in the library. 
+ """ + try: + try: + client.wait() + finally: + conn.close() + except greenlet.GreenletExit: + pass + except Exception: + greenthread.kill(server, *sys.exc_info()) + + +def patch_sendfile(): + from gunicorn.http import wsgi + + if o_sendfile is not None: + setattr(wsgi, "sendfile", _eventlet_sendfile) + + +class EventletWorker(AsyncWorker): + + def patch(self): + hubs.use_hub() + eventlet.monkey_patch(os=False) + patch_sendfile() + + def is_already_handled(self, respiter): + if respiter == EVENTLET_ALREADY_HANDLED: + raise StopIteration() + else: + return super(EventletWorker, self).is_already_handled(respiter) + + def init_process(self): + super(EventletWorker, self).init_process() + self.patch() + + def handle_quit(self, sig, frame): + eventlet.spawn(super(EventletWorker, self).handle_quit, sig, frame) + + def handle_usr1(self, sig, frame): + eventlet.spawn(super(EventletWorker, self).handle_usr1, sig, frame) + + def timeout_ctx(self): + return eventlet.Timeout(self.cfg.keepalive or None, False) + + def handle(self, listener, client, addr): + if self.cfg.is_ssl: + client = eventlet.wrap_ssl(client, server_side=True, + **self.cfg.ssl_options) + + super(EventletWorker, self).handle(listener, client, addr) + + def run(self): + acceptors = [] + for sock in self.sockets: + gsock = GreenSocket(sock) + gsock.setblocking(1) + hfun = partial(self.handle, gsock) + acceptor = eventlet.spawn(_eventlet_serve, gsock, hfun, + self.worker_connections) + + acceptors.append(acceptor) + eventlet.sleep(0.0) + + while self.alive: + self.notify() + eventlet.sleep(1.0) + + self.notify() + try: + with eventlet.Timeout(self.cfg.graceful_timeout) as t: + for a in acceptors: + a.kill(eventlet.StopServe()) + for a in acceptors: + a.wait() + except eventlet.Timeout as te: + if te != t: + raise + for a in acceptors: + a.kill() diff --git a/py3/lib/python3.6/site-packages/gunicorn/workers/ggevent.py b/py3/lib/python3.6/site-packages/gunicorn/workers/ggevent.py new file mode 100644 index 0000000..fb9d919 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/workers/ggevent.py @@ -0,0 +1,246 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
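The eventlet worker above (and the gevent worker that follows) implements the contract spelled out in the base Worker docstrings earlier in this diff: run() is the worker's main loop, it must call self.notify() at least once every self.timeout seconds or the arbiter kills the process, and an init_process() override should call the parent implementation last, since that call is what starts the run() loop. A minimal sketch of a custom worker honouring that contract -- the class name and the one-second sleep are illustrative only, not part of gunicorn:

    import time

    from gunicorn.workers.base import Worker


    class IdleWorker(Worker):
        def init_process(self):
            # per-process setup would go here; the base call comes last
            # because base.Worker.init_process() ends by calling self.run()
            super(IdleWorker, self).init_process()

        def run(self):
            # heartbeat at least once per self.timeout seconds while alive
            while self.alive:
                self.notify()
                time.sleep(1.0)

Such a class could be plugged in by pointing gunicorn's worker_class setting at its dotted path; it is shown only to illustrate the notify()/run() lifecycle, not as a useful worker.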
+ +import errno +import os +import sys +from datetime import datetime +from functools import partial +import time + +_socket = __import__("socket") + +# workaround on osx, disable kqueue +if sys.platform == "darwin": + os.environ['EVENT_NOKQUEUE'] = "1" + +try: + import gevent +except ImportError: + raise RuntimeError("You need gevent installed to use this worker.") +from gevent.pool import Pool +from gevent.server import StreamServer +from gevent.socket import wait_write, socket +from gevent import pywsgi + +import gunicorn +from gunicorn.http.wsgi import base_environ +from gunicorn.workers.base_async import AsyncWorker +from gunicorn.http.wsgi import sendfile as o_sendfile + +VERSION = "gevent/%s gunicorn/%s" % (gevent.__version__, gunicorn.__version__) + +def _gevent_sendfile(fdout, fdin, offset, nbytes): + while True: + try: + return o_sendfile(fdout, fdin, offset, nbytes) + except OSError as e: + if e.args[0] == errno.EAGAIN: + wait_write(fdout) + else: + raise + +def patch_sendfile(): + from gunicorn.http import wsgi + + if o_sendfile is not None: + setattr(wsgi, "sendfile", _gevent_sendfile) + + +class GeventWorker(AsyncWorker): + + server_class = None + wsgi_handler = None + + def patch(self): + from gevent import monkey + monkey.noisy = False + + # if the new version is used make sure to patch subprocess + if gevent.version_info[0] == 0: + monkey.patch_all() + else: + monkey.patch_all(subprocess=True) + + # monkey patch sendfile to make it none blocking + patch_sendfile() + + # patch sockets + sockets = [] + for s in self.sockets: + if sys.version_info[0] == 3: + sockets.append(socket(s.FAMILY, _socket.SOCK_STREAM, + fileno=s.sock.fileno())) + else: + sockets.append(socket(s.FAMILY, _socket.SOCK_STREAM, + _sock=s)) + self.sockets = sockets + + def notify(self): + super(GeventWorker, self).notify() + if self.ppid != os.getppid(): + self.log.info("Parent changed, shutting down: %s", self) + sys.exit(0) + + def timeout_ctx(self): + return gevent.Timeout(self.cfg.keepalive, False) + + def run(self): + servers = [] + ssl_args = {} + + if self.cfg.is_ssl: + ssl_args = dict(server_side=True, **self.cfg.ssl_options) + + for s in self.sockets: + s.setblocking(1) + pool = Pool(self.worker_connections) + if self.server_class is not None: + environ = base_environ(self.cfg) + environ.update({ + "wsgi.multithread": True, + "SERVER_SOFTWARE": VERSION, + }) + server = self.server_class( + s, application=self.wsgi, spawn=pool, log=self.log, + handler_class=self.wsgi_handler, environ=environ, + **ssl_args) + else: + hfun = partial(self.handle, s) + server = StreamServer(s, handle=hfun, spawn=pool, **ssl_args) + + server.start() + servers.append(server) + + while self.alive: + self.notify() + gevent.sleep(1.0) + + try: + # Stop accepting requests + for server in servers: + if hasattr(server, 'close'): # gevent 1.0 + server.close() + if hasattr(server, 'kill'): # gevent < 1.0 + server.kill() + + # Handle current requests until graceful_timeout + ts = time.time() + while time.time() - ts <= self.cfg.graceful_timeout: + accepting = 0 + for server in servers: + if server.pool.free_count() != server.pool.size: + accepting += 1 + + # if no server is accepting a connection, we can exit + if not accepting: + return + + self.notify() + gevent.sleep(1.0) + + # Force kill all active the handlers + self.log.warning("Worker graceful timeout (pid:%s)" % self.pid) + for server in servers: + server.stop(timeout=1) + except: + pass + + def handle(self, listener, client, addr): + # Connected socket timeout defaults 
to socket.getdefaulttimeout(). + # This forces to blocking mode. + client.setblocking(1) + super(GeventWorker, self).handle(listener, client, addr) + + def handle_request(self, listener_name, req, sock, addr): + try: + super(GeventWorker, self).handle_request(listener_name, req, sock, + addr) + except gevent.GreenletExit: + pass + except SystemExit: + pass + + def handle_quit(self, sig, frame): + # Move this out of the signal handler so we can use + # blocking calls. See #1126 + gevent.spawn(super(GeventWorker, self).handle_quit, sig, frame) + + def handle_usr1(self, sig, frame): + # Make the gevent workers handle the usr1 signal + # by deferring to a new greenlet. See #1645 + gevent.spawn(super(GeventWorker, self).handle_usr1, sig, frame) + + if gevent.version_info[0] == 0: + + def init_process(self): + # monkey patch here + self.patch() + + # reinit the hub + import gevent.core + gevent.core.reinit() + + #gevent 0.13 and older doesn't reinitialize dns for us after forking + #here's the workaround + gevent.core.dns_shutdown(fail_requests=1) + gevent.core.dns_init() + super(GeventWorker, self).init_process() + + else: + + def init_process(self): + # monkey patch here + self.patch() + + # reinit the hub + from gevent import hub + hub.reinit() + + # then initialize the process + super(GeventWorker, self).init_process() + + +class GeventResponse(object): + + status = None + headers = None + sent = None + + def __init__(self, status, headers, clength): + self.status = status + self.headers = headers + self.sent = clength + + +class PyWSGIHandler(pywsgi.WSGIHandler): + + def log_request(self): + start = datetime.fromtimestamp(self.time_start) + finish = datetime.fromtimestamp(self.time_finish) + response_time = finish - start + resp_headers = getattr(self, 'response_headers', {}) + resp = GeventResponse(self.status, resp_headers, self.response_length) + if hasattr(self, 'headers'): + req_headers = self.headers.items() + else: + req_headers = [] + self.server.log.access(resp, req_headers, self.environ, response_time) + + def get_environ(self): + env = super(PyWSGIHandler, self).get_environ() + env['gunicorn.sock'] = self.socket + env['RAW_URI'] = self.path + return env + + +class PyWSGIServer(pywsgi.WSGIServer): + pass + + +class GeventPyWSGIWorker(GeventWorker): + "The Gevent StreamServer based workers." + server_class = PyWSGIServer + wsgi_handler = PyWSGIHandler diff --git a/py3/lib/python3.6/site-packages/gunicorn/workers/gthread.py b/py3/lib/python3.6/site-packages/gunicorn/workers/gthread.py new file mode 100644 index 0000000..862f873 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/workers/gthread.py @@ -0,0 +1,367 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +# design: +# a threaded worker accepts connections in the main loop, accepted +# connections are are added to the thread pool as a connection job. On +# keepalive connections are put back in the loop waiting for an event. +# If no event happen after the keep alive timeout, the connectoin is +# closed. + +from collections import deque +from datetime import datetime +import errno +from functools import partial +import os +import socket +import ssl +import sys +from threading import RLock +import time + +from .. import http +from ..http import wsgi +from .. import util +from . import base +from .. 
import six + + +try: + import concurrent.futures as futures +except ImportError: + raise RuntimeError(""" + You need to install the 'futures' package to use this worker with this + Python version. + """) + +try: + from asyncio import selectors +except ImportError: + from gunicorn import selectors + + +class TConn(object): + + def __init__(self, cfg, sock, client, server): + self.cfg = cfg + self.sock = sock + self.client = client + self.server = server + + self.timeout = None + self.parser = None + + # set the socket to non blocking + self.sock.setblocking(False) + + def init(self): + self.sock.setblocking(True) + if self.parser is None: + # wrap the socket if needed + if self.cfg.is_ssl: + self.sock = ssl.wrap_socket(self.sock, server_side=True, + **self.cfg.ssl_options) + + # initialize the parser + self.parser = http.RequestParser(self.cfg, self.sock) + + def set_timeout(self): + # set the timeout + self.timeout = time.time() + self.cfg.keepalive + + def close(self): + util.close(self.sock) + + +class ThreadWorker(base.Worker): + + def __init__(self, *args, **kwargs): + super(ThreadWorker, self).__init__(*args, **kwargs) + self.worker_connections = self.cfg.worker_connections + self.max_keepalived = self.cfg.worker_connections - self.cfg.threads + # initialise the pool + self.tpool = None + self.poller = None + self._lock = None + self.futures = deque() + self._keep = deque() + self.nr_conns = 0 + + @classmethod + def check_config(cls, cfg, log): + max_keepalived = cfg.worker_connections - cfg.threads + + if max_keepalived <= 0 and cfg.keepalive: + log.warning("No keepalived connections can be handled. " + + "Check the number of worker connections and threads.") + + def init_process(self): + self.tpool = futures.ThreadPoolExecutor(max_workers=self.cfg.threads) + self.poller = selectors.DefaultSelector() + self._lock = RLock() + super(ThreadWorker, self).init_process() + + def handle_quit(self, sig, frame): + self.alive = False + # worker_int callback + self.cfg.worker_int(self) + self.tpool.shutdown(False) + time.sleep(0.1) + sys.exit(0) + + def _wrap_future(self, fs, conn): + fs.conn = conn + self.futures.append(fs) + fs.add_done_callback(self.finish_request) + + def enqueue_req(self, conn): + conn.init() + # submit the connection to a worker + fs = self.tpool.submit(self.handle, conn) + self._wrap_future(fs, conn) + + def accept(self, server, listener): + try: + sock, client = listener.accept() + # initialize the connection object + conn = TConn(self.cfg, sock, client, server) + self.nr_conns += 1 + # enqueue the job + self.enqueue_req(conn) + except EnvironmentError as e: + if e.errno not in (errno.EAGAIN, + errno.ECONNABORTED, errno.EWOULDBLOCK): + raise + + def reuse_connection(self, conn, client): + with self._lock: + # unregister the client from the poller + self.poller.unregister(client) + # remove the connection from keepalive + try: + self._keep.remove(conn) + except ValueError: + # race condition + return + + # submit the connection to a worker + self.enqueue_req(conn) + + def murder_keepalived(self): + now = time.time() + while True: + with self._lock: + try: + # remove the connection from the queue + conn = self._keep.popleft() + except IndexError: + break + + delta = conn.timeout - now + if delta > 0: + # add the connection back to the queue + with self._lock: + self._keep.appendleft(conn) + break + else: + self.nr_conns -= 1 + # remove the socket from the poller + with self._lock: + try: + self.poller.unregister(conn.sock) + except EnvironmentError as e: + if e.errno != 
errno.EBADF: + raise + except KeyError: + # already removed by the system, continue + pass + + # close the socket + conn.close() + + def is_parent_alive(self): + # If our parent changed then we shut down. + if self.ppid != os.getppid(): + self.log.info("Parent changed, shutting down: %s", self) + return False + return True + + def run(self): + # init listeners, add them to the event loop + for sock in self.sockets: + sock.setblocking(False) + # a race condition during graceful shutdown may make the listener + # name unavailable in the request handler so capture it once here + server = sock.getsockname() + acceptor = partial(self.accept, server) + self.poller.register(sock, selectors.EVENT_READ, acceptor) + + while self.alive: + # notify the arbiter we are alive + self.notify() + + # can we accept more connections? + if self.nr_conns < self.worker_connections: + # wait for an event + events = self.poller.select(1.0) + for key, _ in events: + callback = key.data + callback(key.fileobj) + + # check (but do not wait) for finished requests + result = futures.wait(self.futures, timeout=0, + return_when=futures.FIRST_COMPLETED) + else: + # wait for a request to finish + result = futures.wait(self.futures, timeout=1.0, + return_when=futures.FIRST_COMPLETED) + + # clean up finished requests + for fut in result.done: + self.futures.remove(fut) + + if not self.is_parent_alive(): + break + + # hanle keepalive timeouts + self.murder_keepalived() + + self.tpool.shutdown(False) + self.poller.close() + + for s in self.sockets: + s.close() + + futures.wait(self.futures, timeout=self.cfg.graceful_timeout) + + def finish_request(self, fs): + if fs.cancelled(): + self.nr_conns -= 1 + fs.conn.close() + return + + try: + (keepalive, conn) = fs.result() + # if the connection should be kept alived add it + # to the eventloop and record it + if keepalive: + # flag the socket as non blocked + conn.sock.setblocking(False) + + # register the connection + conn.set_timeout() + with self._lock: + self._keep.append(conn) + + # add the socket to the event loop + self.poller.register(conn.sock, selectors.EVENT_READ, + partial(self.reuse_connection, conn)) + else: + self.nr_conns -= 1 + conn.close() + except: + # an exception happened, make sure to close the + # socket. + self.nr_conns -= 1 + fs.conn.close() + + def handle(self, conn): + keepalive = False + req = None + try: + req = six.next(conn.parser) + if not req: + return (False, conn) + + # handle the request + keepalive = self.handle_request(req, conn) + if keepalive: + return (keepalive, conn) + except http.errors.NoMoreData as e: + self.log.debug("Ignored premature client disconnection. %s", e) + + except StopIteration as e: + self.log.debug("Closing connection. 
%s", e) + except ssl.SSLError as e: + if e.args[0] == ssl.SSL_ERROR_EOF: + self.log.debug("ssl connection closed") + conn.sock.close() + else: + self.log.debug("Error processing SSL request.") + self.handle_error(req, conn.sock, conn.client, e) + + except EnvironmentError as e: + if e.errno not in (errno.EPIPE, errno.ECONNRESET): + self.log.exception("Socket error processing request.") + else: + if e.errno == errno.ECONNRESET: + self.log.debug("Ignoring connection reset") + else: + self.log.debug("Ignoring connection epipe") + except Exception as e: + self.handle_error(req, conn.sock, conn.client, e) + + return (False, conn) + + def handle_request(self, req, conn): + environ = {} + resp = None + try: + self.cfg.pre_request(self, req) + request_start = datetime.now() + resp, environ = wsgi.create(req, conn.sock, conn.client, + conn.server, self.cfg) + environ["wsgi.multithread"] = True + self.nr += 1 + if self.alive and self.nr >= self.max_requests: + self.log.info("Autorestarting worker after current request.") + resp.force_close() + self.alive = False + + if not self.cfg.keepalive: + resp.force_close() + elif len(self._keep) >= self.max_keepalived: + resp.force_close() + + respiter = self.wsgi(environ, resp.start_response) + try: + if isinstance(respiter, environ['wsgi.file_wrapper']): + resp.write_file(respiter) + else: + for item in respiter: + resp.write(item) + + resp.close() + request_time = datetime.now() - request_start + self.log.access(resp, req, environ, request_time) + finally: + if hasattr(respiter, "close"): + respiter.close() + + if resp.should_close(): + self.log.debug("Closing connection.") + return False + except EnvironmentError: + # pass to next try-except level + six.reraise(*sys.exc_info()) + except Exception: + if resp and resp.headers_sent: + # If the requests have already been sent, we should close the + # connection to indicate the error. + self.log.exception("Error handling request") + try: + conn.sock.shutdown(socket.SHUT_RDWR) + conn.sock.close() + except EnvironmentError: + pass + raise StopIteration() + raise + finally: + try: + self.cfg.post_request(self, req, environ, resp) + except Exception: + self.log.exception("Exception in post_request hook") + + return True diff --git a/py3/lib/python3.6/site-packages/gunicorn/workers/gtornado.py b/py3/lib/python3.6/site-packages/gunicorn/workers/gtornado.py new file mode 100644 index 0000000..7c1b118 --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/workers/gtornado.py @@ -0,0 +1,146 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
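The tornado worker below, like the threaded and async workers above, is driven entirely by values read from self.cfg. As a rough illustration of how those knobs map onto a gunicorn configuration file -- the numbers are placeholders, and only settings these worker modules actually read are shown:

    # gunicorn.conf.py (illustrative values only)
    worker_class = "gthread"     # or "sync", "eventlet", "gevent", "tornado"
    workers = 2
    threads = 4                  # gthread: size of the per-worker thread pool
    worker_connections = 100     # async/gthread: max simultaneous connections;
                                 # gthread warns if worker_connections - threads <= 0
                                 # while keepalive is enabled
    keepalive = 2                # seconds an idle keep-alive connection is held
    timeout = 30                 # workers that stop calling notify() are killed
    graceful_timeout = 30        # grace period used during worker shutdown
    max_requests = 1000          # restart a worker after this many requests ...
    max_requests_jitter = 50     # ... plus a random jitter (see base.Worker.__init__)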
+ +import copy +import os +import sys + +try: + import tornado +except ImportError: + raise RuntimeError("You need tornado installed to use this worker.") +import tornado.web +import tornado.httpserver +from tornado.ioloop import IOLoop, PeriodicCallback +from tornado.wsgi import WSGIContainer +from gunicorn.workers.base import Worker +from gunicorn import __version__ as gversion + + +# `io_loop` arguments to many Tornado functions have been removed in Tornado 5.0 +# +IOLOOP_PARAMETER_REMOVED = tornado.version_info >= (5, 0, 0) + + +class TornadoWorker(Worker): + + @classmethod + def setup(cls): + web = sys.modules.pop("tornado.web") + old_clear = web.RequestHandler.clear + + def clear(self): + old_clear(self) + if "Gunicorn" not in self._headers["Server"]: + self._headers["Server"] += " (Gunicorn/%s)" % gversion + web.RequestHandler.clear = clear + sys.modules["tornado.web"] = web + + def handle_exit(self, sig, frame): + if self.alive: + super(TornadoWorker, self).handle_exit(sig, frame) + + def handle_request(self): + self.nr += 1 + if self.alive and self.nr >= self.max_requests: + self.log.info("Autorestarting worker after current request.") + self.alive = False + + def watchdog(self): + if self.alive: + self.notify() + + if self.ppid != os.getppid(): + self.log.info("Parent changed, shutting down: %s", self) + self.alive = False + + def heartbeat(self): + if not self.alive: + if self.server_alive: + if hasattr(self, 'server'): + try: + self.server.stop() + except Exception: + pass + self.server_alive = False + else: + if not self.ioloop._callbacks: + self.ioloop.stop() + + def run(self): + self.ioloop = IOLoop.instance() + self.alive = True + self.server_alive = False + if IOLOOP_PARAMETER_REMOVED: + PeriodicCallback(self.watchdog, 1000).start() + PeriodicCallback(self.heartbeat, 1000).start() + else: + PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start() + PeriodicCallback(self.heartbeat, 1000, io_loop=self.ioloop).start() + + # Assume the app is a WSGI callable if its not an + # instance of tornado.web.Application or is an + # instance of tornado.wsgi.WSGIApplication + app = self.wsgi + if not isinstance(app, tornado.web.Application) or \ + isinstance(app, tornado.wsgi.WSGIApplication): + app = WSGIContainer(app) + + # Monkey-patching HTTPConnection.finish to count the + # number of requests being handled by Tornado. This + # will help gunicorn shutdown the worker if max_requests + # is exceeded. 
+ httpserver = sys.modules["tornado.httpserver"] + if hasattr(httpserver, 'HTTPConnection'): + old_connection_finish = httpserver.HTTPConnection.finish + + def finish(other): + self.handle_request() + old_connection_finish(other) + httpserver.HTTPConnection.finish = finish + sys.modules["tornado.httpserver"] = httpserver + + server_class = tornado.httpserver.HTTPServer + else: + + class _HTTPServer(tornado.httpserver.HTTPServer): + + def on_close(instance, server_conn): + self.handle_request() + super(_HTTPServer, instance).on_close(server_conn) + + server_class = _HTTPServer + + if self.cfg.is_ssl: + _ssl_opt = copy.deepcopy(self.cfg.ssl_options) + # tornado refuses initialization if ssl_options contains following + # options + del _ssl_opt["do_handshake_on_connect"] + del _ssl_opt["suppress_ragged_eofs"] + if IOLOOP_PARAMETER_REMOVED: + server = server_class(app, ssl_options=_ssl_opt) + else: + server = server_class(app, io_loop=self.ioloop, + ssl_options=_ssl_opt) + else: + if IOLOOP_PARAMETER_REMOVED: + server = server_class(app) + else: + server = server_class(app, io_loop=self.ioloop) + + self.server = server + self.server_alive = True + + for s in self.sockets: + s.setblocking(0) + if hasattr(server, "add_socket"): # tornado > 2.0 + server.add_socket(s) + elif hasattr(server, "_sockets"): # tornado 2.0 + server._sockets[s.fileno()] = s + + server.no_keep_alive = self.cfg.keepalive <= 0 + server.start(num_processes=1) + + self.ioloop.start() diff --git a/py3/lib/python3.6/site-packages/gunicorn/workers/sync.py b/py3/lib/python3.6/site-packages/gunicorn/workers/sync.py new file mode 100644 index 0000000..1d2ce2f --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/workers/sync.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. +# + +from datetime import datetime +import errno +import os +import select +import socket +import ssl +import sys + +import gunicorn.http as http +import gunicorn.http.wsgi as wsgi +import gunicorn.util as util +import gunicorn.workers.base as base +from gunicorn import six + +class StopWaiting(Exception): + """ exception raised to stop waiting for a connnection """ + +class SyncWorker(base.Worker): + + def accept(self, listener): + client, addr = listener.accept() + client.setblocking(1) + util.close_on_exec(client) + self.handle(listener, client, addr) + + def wait(self, timeout): + try: + self.notify() + ret = select.select(self.wait_fds, [], [], timeout) + if ret[0]: + if self.PIPE[0] in ret[0]: + os.read(self.PIPE[0], 1) + return ret[0] + + except select.error as e: + if e.args[0] == errno.EINTR: + return self.sockets + if e.args[0] == errno.EBADF: + if self.nr < 0: + return self.sockets + else: + raise StopWaiting + raise + + def is_parent_alive(self): + # If our parent changed then we shut down. + if self.ppid != os.getppid(): + self.log.info("Parent changed, shutting down: %s", self) + return False + return True + + def run_for_one(self, timeout): + listener = self.sockets[0] + while self.alive: + self.notify() + + # Accept a connection. If we get an error telling us + # that no connection is waiting we fall down to the + # select which is where we'll wait for a bit for new + # workers to come give us some love. + try: + self.accept(listener) + # Keep processing clients until no one is waiting. This + # prevents the need to select() for every client that we + # process. 
+ continue + + except EnvironmentError as e: + if e.errno not in (errno.EAGAIN, errno.ECONNABORTED, + errno.EWOULDBLOCK): + raise + + if not self.is_parent_alive(): + return + + try: + self.wait(timeout) + except StopWaiting: + return + + def run_for_multiple(self, timeout): + while self.alive: + self.notify() + + try: + ready = self.wait(timeout) + except StopWaiting: + return + + if ready is not None: + for listener in ready: + if listener == self.PIPE[0]: + continue + + try: + self.accept(listener) + except EnvironmentError as e: + if e.errno not in (errno.EAGAIN, errno.ECONNABORTED, + errno.EWOULDBLOCK): + raise + + if not self.is_parent_alive(): + return + + def run(self): + # if no timeout is given the worker will never wait and will + # use the CPU for nothing. This minimal timeout prevent it. + timeout = self.timeout or 0.5 + + # self.socket appears to lose its blocking status after + # we fork in the arbiter. Reset it here. + for s in self.sockets: + s.setblocking(0) + + if len(self.sockets) > 1: + self.run_for_multiple(timeout) + else: + self.run_for_one(timeout) + + def handle(self, listener, client, addr): + req = None + try: + if self.cfg.is_ssl: + client = ssl.wrap_socket(client, server_side=True, + **self.cfg.ssl_options) + + parser = http.RequestParser(self.cfg, client) + req = six.next(parser) + self.handle_request(listener, req, client, addr) + except http.errors.NoMoreData as e: + self.log.debug("Ignored premature client disconnection. %s", e) + except StopIteration as e: + self.log.debug("Closing connection. %s", e) + except ssl.SSLError as e: + if e.args[0] == ssl.SSL_ERROR_EOF: + self.log.debug("ssl connection closed") + client.close() + else: + self.log.debug("Error processing SSL request.") + self.handle_error(req, client, addr, e) + except EnvironmentError as e: + if e.errno not in (errno.EPIPE, errno.ECONNRESET): + self.log.exception("Socket error processing request.") + else: + if e.errno == errno.ECONNRESET: + self.log.debug("Ignoring connection reset") + else: + self.log.debug("Ignoring EPIPE") + except Exception as e: + self.handle_error(req, client, addr, e) + finally: + util.close(client) + + def handle_request(self, listener, req, client, addr): + environ = {} + resp = None + try: + self.cfg.pre_request(self, req) + request_start = datetime.now() + resp, environ = wsgi.create(req, client, addr, + listener.getsockname(), self.cfg) + # Force the connection closed until someone shows + # a buffering proxy that supports Keep-Alive to + # the backend. + resp.force_close() + self.nr += 1 + if self.nr >= self.max_requests: + self.log.info("Autorestarting worker after current request.") + self.alive = False + respiter = self.wsgi(environ, resp.start_response) + try: + if isinstance(respiter, environ['wsgi.file_wrapper']): + resp.write_file(respiter) + else: + for item in respiter: + resp.write(item) + resp.close() + request_time = datetime.now() - request_start + self.log.access(resp, req, environ, request_time) + finally: + if hasattr(respiter, "close"): + respiter.close() + except EnvironmentError: + # pass to next try-except level + six.reraise(*sys.exc_info()) + except Exception: + if resp and resp.headers_sent: + # If the requests have already been sent, we should close the + # connection to indicate the error. 
+ self.log.exception("Error handling request") + try: + client.shutdown(socket.SHUT_RDWR) + client.close() + except EnvironmentError: + pass + raise StopIteration() + raise + finally: + try: + self.cfg.post_request(self, req, environ, resp) + except Exception: + self.log.exception("Exception in post_request hook") diff --git a/py3/lib/python3.6/site-packages/gunicorn/workers/workertmp.py b/py3/lib/python3.6/site-packages/gunicorn/workers/workertmp.py new file mode 100644 index 0000000..36bc97a --- /dev/null +++ b/py3/lib/python3.6/site-packages/gunicorn/workers/workertmp.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import os +import platform +import tempfile + +from gunicorn import util + +PLATFORM = platform.system() +IS_CYGWIN = PLATFORM.startswith('CYGWIN') + + +class WorkerTmp(object): + + def __init__(self, cfg): + old_umask = os.umask(cfg.umask) + fdir = cfg.worker_tmp_dir + if fdir and not os.path.isdir(fdir): + raise RuntimeError("%s doesn't exist. Can't create workertmp." % fdir) + fd, name = tempfile.mkstemp(prefix="wgunicorn-", dir=fdir) + + # allows the process to write to the file + util.chown(name, cfg.uid, cfg.gid) + os.umask(old_umask) + + # unlink the file so we don't leak tempory files + try: + if not IS_CYGWIN: + util.unlink(name) + self._tmp = os.fdopen(fd, 'w+b', 1) + except: + os.close(fd) + raise + + self.spinner = 0 + + def notify(self): + try: + self.spinner = (self.spinner + 1) % 2 + os.fchmod(self._tmp.fileno(), self.spinner) + except AttributeError: + # python < 2.6 + self._tmp.truncate(0) + os.write(self._tmp.fileno(), b"X") + + def last_update(self): + return os.fstat(self._tmp.fileno()).st_ctime + + def fileno(self): + return self._tmp.fileno() + + def close(self): + return self._tmp.close() diff --git a/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/LICENSE.txt b/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..8f080ea --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J. Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/METADATA b/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/METADATA new file mode 100644 index 0000000..c370bee --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/METADATA @@ -0,0 +1,193 @@ +Metadata-Version: 2.1 +Name: h11 +Version: 0.9.0 +Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 +Home-page: https://github.com/python-hyper/h11 +Author: Nathaniel J. Smith +Author-email: njs@pobox.com +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: System :: Networking + +h11 +=== + +.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master + :target: https://travis-ci.org/python-hyper/h11 + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-hyper/h11 + :alt: Test coverage + +.. image:: https://readthedocs.org/projects/h11/badge/?version=latest + :target: http://h11.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +This is a little HTTP/1.1 library written from scratch in Python, +heavily inspired by `hyper-h2 `_. + +It's a "bring-your-own-I/O" library; h11 contains no IO code +whatsoever. This means you can hook h11 up to your favorite network +API, and that could be anything you want: synchronous, threaded, +asynchronous, or your own implementation of `RFC 6214 +`_ -- h11 won't judge you. +(Compare this to the current state of the art, where every time a `new +network API `_ comes along then someone +gets to start over reimplementing the entire HTTP protocol from +scratch.) Cory Benfield made an `excellent blog post describing the +benefits of this approach +`_, or if you like video +then here's his `PyCon 2016 talk on the same theme +`_. + +This also means that h11 is not immediately useful out of the box: +it's a toolkit for building programs that speak HTTP, not something +that could directly replace ``requests`` or ``twisted.web`` or +whatever. But h11 makes it much easier to implement something like +``requests`` or ``twisted.web``. + +At a high level, working with h11 goes like this: + +1) First, create an ``h11.Connection`` object to track the state of a + single HTTP/1.1 connection. + +2) When you read data off the network, pass it to + ``conn.receive_data(...)``; you'll get back a list of objects + representing high-level HTTP "events". 
+ +3) When you want to send a high-level HTTP event, create the + corresponding "event" object and pass it to ``conn.send(...)``; + this will give you back some bytes that you can then push out + through the network. + +For example, a client might instantiate and then send a +``h11.Request`` object, then zero or more ``h11.Data`` objects for the +request body (e.g., if this is a POST), and then a +``h11.EndOfMessage`` to indicate the end of the message. Then the +server would then send back a ``h11.Response``, some ``h11.Data``, and +its own ``h11.EndOfMessage``. If either side violates the protocol, +you'll get a ``h11.ProtocolError`` exception. + +h11 is suitable for implementing both servers and clients, and has a +pleasantly symmetric API: the events you send as a client are exactly +the ones that you receive as a server and vice-versa. + +`Here's an example of a tiny HTTP client +`_ + +It also has `a fine manual `_. + +FAQ +--- + +*Whyyyyy?* + +I wanted to play with HTTP in `Curio +`__ and `Trio +`__, which at the time didn't have any +HTTP libraries. So I thought, no big deal, Python has, like, a dozen +different implementations of HTTP, surely I can find one that's +reusable. I didn't find one, but I did find Cory's call-to-arms +blog-post. So I figured, well, fine, if I have to implement HTTP from +scratch, at least I can make sure no-one *else* has to ever again. + +*Should I use it?* + +Maybe. You should be aware that it's a very young project. But, it's +feature complete and has an exhaustive test-suite and complete docs, +so the next step is for people to try using it and see how it goes +:-). If you do then please let us know -- if nothing else we'll want +to talk to you before making any incompatible changes! + +*What are the features/limitations?* + +Roughly speaking, it's trying to be a robust, complete, and non-hacky +implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: +HTTP/1.1 Message Syntax and Routing +`_. That is, it mostly focuses on +implementing HTTP at the level of taking bytes on and off the wire, +and the headers related to that, and tries to be anal about spec +conformance. It doesn't know about higher-level concerns like URL +routing, conditional GETs, cross-origin cookie policies, or content +negotiation. But it does know how to take care of framing, +cross-version differences in keep-alive handling, and the "obsolete +line folding" rule, so you can focus your energies on the hard / +interesting parts for your application, and it tries to support the +full specification in the sense that any useful HTTP/1.1 conformant +application should be able to use h11. + +It's pure Python, and has no dependencies outside of the standard +library. + +It has a test suite with 100.0% coverage for both statements and +branches. + +Currently it supports Python 3 (testing on 3.4-3.7), Python 2.7, and PyPy. +(Originally it had a Cython wrapper for `http-parser +`_ and a beautiful nested state +machine implemented with ``yield from`` to postprocess the output. But +I had to take these out -- the new *parser* needs fewer lines-of-code +than the old *parser wrapper*, is written in pure Python, uses no +exotic language syntax, and has more features. It's sad, really; that +old state machine was really slick. I just need a few sentences here +to mourn that.) + +I don't know how fast it is. 
I haven't benchmarked or profiled it yet, +so it's probably got a few pointless hot spots, and I've been trying +to err on the side of simplicity and robustness instead of +micro-optimization. But at the architectural level I tried hard to +avoid fundamentally bad decisions, e.g., I believe that all the +parsing algorithms remain linear-time even in the face of pathological +input like slowloris, and there are no byte-by-byte loops. (I also +believe that it maintains bounded memory usage in the face of +arbitrary/pathological input.) + +The whole library is ~800 lines-of-code. You can read and understand +the whole thing in less than an hour. Most of the energy invested in +this so far has been spent on trying to keep things simple by +minimizing special-cases and ad hoc state manipulation; even though it +is now quite small and simple, I'm still annoyed that I haven't +figured out how to make it even smaller and simpler. (Unfortunately, +HTTP does not lend itself to simplicity.) + +The API is ~feature complete and I don't expect the general outlines +to change much, but you can't judge an API's ergonomics until you +actually document and use it, so I'd expect some changes in the +details. + +*How do I try it?* + +.. code-block:: sh + + $ pip install h11 + $ git clone git@github.com:python-hyper/h11 + $ cd h11/examples + $ python basic-client.py + +and go from there. + +*License?* + +MIT + +*Code of conduct?* + +Contributors are requested to follow our `code of conduct +`_ in +all project spaces. + + diff --git a/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/RECORD b/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/RECORD new file mode 100644 index 0000000..c05e608 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/RECORD @@ -0,0 +1,51 @@ +h11/__init__.py,sha256=3gYpvQiX8_6-dyXaAxQt_sIYREVTz1T-zB5Lf4hjKt0,909 +h11/_abnf.py,sha256=tMKqgOEkTHHp8sPd_gmU9Qowe_yXXrihct63RX2zJsg,4637 +h11/_connection.py,sha256=igtLgTM-3tpiDvJA9R5toMQUiJw9o4m2wFD8izlelI0,24986 +h11/_events.py,sha256=BjWc_btWJW_N-CRUPRo68fKF8vv6hlcMs19b9oNBymc,9910 +h11/_headers.py,sha256=4LGuElGbwCdMT3pHiTc3ghUN3f7uPMLoYnkoR86YXXs,6823 +h11/_readers.py,sha256=_lYHBTdXFjF3V8z8mn9-TGW5XrskWVggq3pL8yUIgGA,7214 +h11/_receivebuffer.py,sha256=hxloIiW5b5g_cdbFAzPw-tv6YpKnDIsN4z1FUWlopcg,3911 +h11/_state.py,sha256=7YG1pJzTbwx46U_HIqKViJvf62nqGJd0g9KZ3TRR5i4,12175 +h11/_util.py,sha256=D6Pfn5Jc1TIdMjrUEpg1J_KcMvBIbCXHA9Aac5GTedM,4994 +h11/_version.py,sha256=8QshbQupywjssaujkSQCtTpWC_MRedlwIOO5IPhkeIA,685 +h11/_writers.py,sha256=k2j07RoKw6dsW_soJ6RIutPt6QddOJlZv-gP7t_I9OY,4683 +h11/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +h11/tests/helpers.py,sha256=nKheRzldPf278C81d_9_Mb9yWsYJ5udwKg_oq-fAz-U,2528 +h11/tests/test_against_stdlib_http.py,sha256=es30cmRVSTcrkNqoH5AGe_uT3ljR0OaLf2tID_17Owo,4010 +h11/tests/test_connection.py,sha256=24pNvHl9DV8rz0urlPBiAJeGNnODzWD2AiyfDu7AHmc,35578 +h11/tests/test_events.py,sha256=VUmiZfqBs0OM3Zxiwh6QviP1kZGbBqoMjBpz0OgwepE,4780 +h11/tests/test_headers.py,sha256=VH1T4kYh3B_PMU8PCmNwIS3YwXWsPvc-QZR2i79G6Vw,4847 +h11/tests/test_helpers.py,sha256=mPOAiv4HtyG0_T23K_ihh1JUs0y71ykD47c9r3iVtz0,573 +h11/tests/test_io.py,sha256=X2K26_1-yUPgv9KH52pNdiw7h-j33thu69MRmUEiVz4,14432 +h11/tests/test_receivebuffer.py,sha256=FId-lVBL3gBKmKD8gmmuZW_vdPYDtywxC3HozHI4Im0,2099 +h11/tests/test_state.py,sha256=JMKqA2d2wtskf7FbsAr1s9qsIul4WtwdXVAOCUJgalk,8551 +h11/tests/test_util.py,sha256=nlyS9b0KaP4rpsyRae0KmJ0ANNnrUjDmYaKFzz-l2RM,2711 
+h11/tests/data/test-file,sha256=ZJ03Rqs98oJw29OHzJg7LlMzyGQaRAY0r3AqBeM2wVU,65 +h11-0.9.0.dist-info/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 +h11-0.9.0.dist-info/METADATA,sha256=H9b57xIB1kDea2qMZQnzwatUmGadiuw-jU30EmALToY,8085 +h11-0.9.0.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 +h11-0.9.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 +h11-0.9.0.dist-info/RECORD,, +h11-0.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +h11/__pycache__/_headers.cpython-36.pyc,, +h11/__pycache__/_state.cpython-36.pyc,, +h11/__pycache__/_connection.cpython-36.pyc,, +h11/__pycache__/_events.cpython-36.pyc,, +h11/__pycache__/_abnf.cpython-36.pyc,, +h11/__pycache__/_receivebuffer.cpython-36.pyc,, +h11/__pycache__/_writers.cpython-36.pyc,, +h11/__pycache__/_readers.cpython-36.pyc,, +h11/__pycache__/_util.cpython-36.pyc,, +h11/__pycache__/_version.cpython-36.pyc,, +h11/__pycache__/__init__.cpython-36.pyc,, +h11/tests/__pycache__/test_helpers.cpython-36.pyc,, +h11/tests/__pycache__/test_events.cpython-36.pyc,, +h11/tests/__pycache__/test_receivebuffer.cpython-36.pyc,, +h11/tests/__pycache__/helpers.cpython-36.pyc,, +h11/tests/__pycache__/test_io.cpython-36.pyc,, +h11/tests/__pycache__/test_state.cpython-36.pyc,, +h11/tests/__pycache__/test_headers.cpython-36.pyc,, +h11/tests/__pycache__/test_connection.cpython-36.pyc,, +h11/tests/__pycache__/__init__.cpython-36.pyc,, +h11/tests/__pycache__/test_util.cpython-36.pyc,, +h11/tests/__pycache__/test_against_stdlib_http.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/WHEEL b/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/WHEEL new file mode 100644 index 0000000..78e6f69 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.4) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/top_level.txt new file mode 100644 index 0000000..0d24def --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11-0.9.0.dist-info/top_level.txt @@ -0,0 +1 @@ +h11 diff --git a/py3/lib/python3.6/site-packages/h11/__init__.py b/py3/lib/python3.6/site-packages/h11/__init__.py new file mode 100644 index 0000000..ae39e01 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/__init__.py @@ -0,0 +1,21 @@ +# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), +# containing no networking code at all, loosely modelled on hyper-h2's generic +# implementation of HTTP/2 (and in particular the h2.connection.H2Connection +# class). There's still a bunch of subtle details you need to get right if you +# want to make this actually useful, because it doesn't implement all the +# semantics to check that what you're asking to write to the wire is sensible, +# but at least it gets you out of dealing with the wire itself. 
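The module comment above repeats the point made in the package METADATA: h11 contains no networking code, so the caller owns the socket and only shuttles bytes to and from a Connection. A minimal client sketch of that flow -- hostname, port and buffer size are placeholders, and PAUSED/error handling is omitted for brevity:

    import socket

    import h11

    conn = h11.Connection(our_role=h11.CLIENT)
    sock = socket.create_connection(("example.com", 80))   # placeholder host

    # turn high-level events into bytes and push them out ourselves
    for event in (h11.Request(method="GET", target="/",
                              headers=[("Host", "example.com")]),
                  h11.EndOfMessage()):
        data = conn.send(event)
        if data:
            sock.sendall(data)

    # feed received bytes in, pull events out until the response is complete
    while True:
        event = conn.next_event()
        if event is h11.NEED_DATA:
            conn.receive_data(sock.recv(4096))
        elif isinstance(event, h11.EndOfMessage):
            break
        else:
            print(event)        # a Response event, then zero or more Data events

    sock.close()

The server side is symmetric, as the METADATA notes: the events a client sends are exactly the ones a server receives, and vice versa.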
+ +from ._connection import * +from ._events import * +from ._state import * +from ._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from ._version import __version__ + +PRODUCT_ID = "python-h11/" + __version__ + + +__all__ = ["ProtocolError", "LocalProtocolError", "RemoteProtocolError"] +__all__ += _events.__all__ +__all__ += _connection.__all__ +__all__ += _state.__all__ diff --git a/py3/lib/python3.6/site-packages/h11/_abnf.py b/py3/lib/python3.6/site-packages/h11/_abnf.py new file mode 100644 index 0000000..e6d49e1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/_abnf.py @@ -0,0 +1,129 @@ +# We use native strings for all the re patterns, to take advantage of string +# formatting, and then convert to bytestrings when compiling the final re +# objects. + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace +# OWS = *( SP / HTAB ) +# ; optional whitespace +OWS = r"[ \t]*" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators +# token = 1*tchar +# +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" +# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" +# / DIGIT / ALPHA +# ; any VCHAR, except delimiters +token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields +# field-name = token +field_name = token + +# The standard says: +# +# field-value = *( field-content / obs-fold ) +# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] +# field-vchar = VCHAR / obs-text +# obs-fold = CRLF 1*( SP / HTAB ) +# ; obsolete line folding +# ; see Section 3.2.4 +# +# https://tools.ietf.org/html/rfc5234#appendix-B.1 +# +# VCHAR = %x21-7E +# ; visible (printing) characters +# +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string +# obs-text = %x80-FF +# +# However, the standard definition of field-content is WRONG! It disallows +# fields containing a single visible character surrounded by whitespace, +# e.g. "foo a bar". +# +# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 +# +# So our definition of field_content attempts to fix it up... +# +# Also, we allow lots of control characters, because apparently people assume +# that they're legal in practice (e.g., google analytics makes cookies with +# \x01 in them!): +# https://github.com/python-hyper/h11/issues/57 +# We still don't allow NUL or whitespace, because those are often treated as +# meta-characters and letting them through can lead to nasty issues like SSRF. +vchar = r"[\x21-\x7e]" +vchar_or_obs_text = r"[^\x00\s]" +field_vchar = vchar_or_obs_text +field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) + +# We handle obs-fold at a different level, and our fixed-up field_content +# already grows to swallow the whole value, so ? instead of * +field_value = r"({field_content})?".format(**globals()) + +# header-field = field-name ":" OWS field-value OWS +header_field = ( + r"(?P{field_name})" + r":" + r"{OWS}" + r"(?P{field_value})" + r"{OWS}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line +# +# request-line = method SP request-target SP HTTP-version CRLF +# method = token +# HTTP-version = HTTP-name "/" DIGIT "." 
DIGIT +# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive +# +# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full +# URL, host+port (for connect), or even "*", but in any case we are guaranteed +# that it contists of the visible printing characters. +method = token +request_target = r"{vchar}+".format(**globals()) +http_version = r"HTTP/(?P[0-9]\.[0-9])" +request_line = ( + r"(?P{method})" + r" " + r"(?P{request_target})" + r" " + r"{http_version}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line +# +# status-line = HTTP-version SP status-code SP reason-phrase CRLF +# status-code = 3DIGIT +# reason-phrase = *( HTAB / SP / VCHAR / obs-text ) +status_code = r"[0-9]{3}" +reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals()) +status_line = ( + r"{http_version}" + r" " + r"(?P{status_code})" + # However, there are apparently a few too many servers out there that just + # leave out the reason phrase: + # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036 + # https://github.com/seanmonstar/httparse/issues/29 + # so make it optional. ?: is a non-capturing group. + r"(?: (?P{reason_phrase}))?".format(**globals()) +) + +HEXDIG = r"[0-9A-Fa-f]" +# Actually +# +# chunk-size = 1*HEXDIG +# +# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20 +chunk_size = r"({HEXDIG}){{1,20}}".format(**globals()) +# Actually +# +# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) +# +# but we aren't parsing the things so we don't really care. +chunk_ext = r";.*" +chunk_header = ( + r"(?P{chunk_size})" + r"(?P{chunk_ext})?" + r"\r\n".format(**globals()) +) diff --git a/py3/lib/python3.6/site-packages/h11/_connection.py b/py3/lib/python3.6/site-packages/h11/_connection.py new file mode 100644 index 0000000..4fc629a --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/_connection.py @@ -0,0 +1,586 @@ +# This contains the main Connection class. Everything in h11 revolves around +# this. + +from ._events import * # Import all event types +from ._headers import get_comma_header, has_expect_100_continue, set_comma_header +from ._readers import READERS +from ._receivebuffer import ReceiveBuffer +from ._state import * # Import all state sentinels +from ._state import _SWITCH_CONNECT, _SWITCH_UPGRADE, ConnectionState +from ._util import ( # Import the internal things we need + LocalProtocolError, + make_sentinel, + RemoteProtocolError, +) +from ._writers import WRITERS + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = ["Connection", "NEED_DATA", "PAUSED"] + +NEED_DATA = make_sentinel("NEED_DATA") +PAUSED = make_sentinel("PAUSED") + +# If we ever have this much buffered without it making a complete parseable +# event, we error out. The only time we really buffer is when reading the +# request/reponse line + headers together, so this is effectively the limit on +# the size of that. +# +# Some precedents for defaults: +# - node.js: 80 * 1024 +# - tomcat: 8 * 1024 +# - IIS: 16 * 1024 +# - Apache: <8 KiB per line> +DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 + +# RFC 7230's rules for connection lifecycles: +# - If either side says they want to close the connection, then the connection +# must close. +# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close +# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive +# (and even this is a mess -- e.g. 
if you're implementing a proxy then +# sending Connection: keep-alive is forbidden). +# +# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So +# our rule is: +# - If someone says Connection: close, we will close +# - If someone uses HTTP/1.0, we will close. +def _keep_alive(event): + connection = get_comma_header(event.headers, b"connection") + if b"close" in connection: + return False + if getattr(event, "http_version", b"1.1") < b"1.1": + return False + return True + + +def _body_framing(request_method, event): + # Called when we enter SEND_BODY to figure out framing information for + # this body. + # + # These are the only two events that can trigger a SEND_BODY state: + assert type(event) in (Request, Response) + # Returns one of: + # + # ("content-length", count) + # ("chunked", ()) + # ("http/1.0", ()) + # + # which are (lookup key, *args) for constructing body reader/writer + # objects. + # + # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 + # + # Step 1: some responses always have an empty body, regardless of what the + # headers say. + if type(event) is Response: + if ( + event.status_code in (204, 304) + or request_method == b"HEAD" + or (request_method == b"CONNECT" and 200 <= event.status_code < 300) + ): + return ("content-length", (0,)) + # Section 3.3.3 also lists another case -- responses with status_code + # < 200. For us these are InformationalResponses, not Responses, so + # they can't get into this function in the first place. + assert event.status_code >= 200 + + # Step 2: check for Transfer-Encoding (T-E beats C-L): + transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") + if transfer_encodings: + assert transfer_encodings == [b"chunked"] + return ("chunked", ()) + + # Step 3: check for Content-Length + content_lengths = get_comma_header(event.headers, b"content-length") + if content_lengths: + return ("content-length", (int(content_lengths[0]),)) + + # Step 4: no applicable headers; fallback/default depends on type + if type(event) is Request: + return ("content-length", (0,)) + else: + return ("http/1.0", ()) + + +################################################################ +# +# The main Connection class +# +################################################################ + + +class Connection(object): + """An object encapsulating the state of an HTTP connection. + + Args: + our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If + you're implementing a server, pass :data:`h11.SERVER`. + + max_incomplete_event_size (int): + The maximum number of bytes we're willing to buffer of an + incomplete event. In practice this mostly sets a limit on the + maximum size of the request/response line + headers. If this is + exceeded, then :meth:`next_event` will raise + :exc:`RemoteProtocolError`. 
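For concreteness, a minimal sketch of constructing connections for each role, assuming the vendored h11 package is importable; the 64 KiB figure is just an illustrative override of the 16 KiB default described above.

import h11

client = h11.Connection(our_role=h11.CLIENT)
# A server willing to buffer up to 64 KiB of request line + headers:
server = h11.Connection(our_role=h11.SERVER,
                        max_incomplete_event_size=64 * 1024)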
+ + """ + + def __init__( + self, our_role, max_incomplete_event_size=DEFAULT_MAX_INCOMPLETE_EVENT_SIZE + ): + self._max_incomplete_event_size = max_incomplete_event_size + # State and role tracking + if our_role not in (CLIENT, SERVER): + raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role)) + self.our_role = our_role + if our_role is CLIENT: + self.their_role = SERVER + else: + self.their_role = CLIENT + self._cstate = ConnectionState() + + # Callables for converting data->events or vice-versa given the + # current state + self._writer = self._get_io_object(self.our_role, None, WRITERS) + self._reader = self._get_io_object(self.their_role, None, READERS) + + # Holds any unprocessed received data + self._receive_buffer = ReceiveBuffer() + # If this is true, then it indicates that the incoming connection was + # closed *after* the end of whatever's in self._receive_buffer: + self._receive_buffer_closed = False + + # Extra bits of state that don't fit into the state machine. + # + # These two are only used to interpret framing headers for figuring + # out how to read/write response bodies. their_http_version is also + # made available as a convenient public API. + self.their_http_version = None + self._request_method = None + # This is pure flow-control and doesn't at all affect the set of legal + # transitions, so no need to bother ConnectionState with it: + self.client_is_waiting_for_100_continue = False + + @property + def states(self): + """A dictionary like:: + + {CLIENT: , SERVER: } + + See :ref:`state-machine` for details. + + """ + return dict(self._cstate.states) + + @property + def our_state(self): + """The current state of whichever role we are playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.our_role] + + @property + def their_state(self): + """The current state of whichever role we are NOT playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.their_role] + + @property + def they_are_waiting_for_100_continue(self): + return self.their_role is CLIENT and self.client_is_waiting_for_100_continue + + def start_next_cycle(self): + """Attempt to reset our connection state for a new request/response + cycle. + + If both client and server are in :data:`DONE` state, then resets them + both to :data:`IDLE` state in preparation for a new request/response + cycle on this same connection. Otherwise, raises a + :exc:`LocalProtocolError`. + + See :ref:`keepalive-and-pipelining`. + + """ + old_states = dict(self._cstate.states) + self._cstate.start_next_cycle() + self._request_method = None + # self.their_http_version gets left alone, since it presumably lasts + # beyond a single request/response cycle + assert not self.client_is_waiting_for_100_continue + self._respond_to_state_changes(old_states) + + def _process_error(self, role): + old_states = dict(self._cstate.states) + self._cstate.process_error(role) + self._respond_to_state_changes(old_states) + + def _server_switch_event(self, event): + if type(event) is InformationalResponse and event.status_code == 101: + return _SWITCH_UPGRADE + if type(event) is Response: + if ( + _SWITCH_CONNECT in self._cstate.pending_switch_proposals + and 200 <= event.status_code < 300 + ): + return _SWITCH_CONNECT + return None + + # All events go through here + def _process_event(self, role, event): + # First, pass the event through the state machine to make sure it + # succeeds. 
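A small sketch of how the state properties above read on the client side once a request has gone out; the request details here are hypothetical.

import h11

conn = h11.Connection(our_role=h11.CLIENT)
assert conn.states == {h11.CLIENT: h11.IDLE, h11.SERVER: h11.IDLE}

conn.send(h11.Request(method="GET", target="/",
                      headers=[("Host", "example.com")]))
conn.send(h11.EndOfMessage())
assert conn.our_state is h11.DONE              # our part of the cycle is finished
assert conn.their_state is h11.SEND_RESPONSE   # now waiting on the server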
+ old_states = dict(self._cstate.states) + if role is CLIENT and type(event) is Request: + if event.method == b"CONNECT": + self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) + if get_comma_header(event.headers, b"upgrade"): + self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) + server_switch_event = None + if role is SERVER: + server_switch_event = self._server_switch_event(event) + self._cstate.process_event(role, type(event), server_switch_event) + + # Then perform the updates triggered by it. + + # self._request_method + if type(event) is Request: + self._request_method = event.method + + # self.their_http_version + if role is self.their_role and type(event) in ( + Request, + Response, + InformationalResponse, + ): + self.their_http_version = event.http_version + + # Keep alive handling + # + # RFC 7230 doesn't really say what one should do if Connection: close + # shows up on a 1xx InformationalResponse. I think the idea is that + # this is not supposed to happen. In any case, if it does happen, we + # ignore it. + if type(event) in (Request, Response) and not _keep_alive(event): + self._cstate.process_keep_alive_disabled() + + # 100-continue + if type(event) is Request and has_expect_100_continue(event): + self.client_is_waiting_for_100_continue = True + if type(event) in (InformationalResponse, Response): + self.client_is_waiting_for_100_continue = False + if role is CLIENT and type(event) in (Data, EndOfMessage): + self.client_is_waiting_for_100_continue = False + + self._respond_to_state_changes(old_states, event) + + def _get_io_object(self, role, event, io_dict): + # event may be None; it's only used when entering SEND_BODY + state = self._cstate.states[role] + if state is SEND_BODY: + # Special case: the io_dict has a dict of reader/writer factories + # that depend on the request/response framing. + framing_type, args = _body_framing(self._request_method, event) + return io_dict[SEND_BODY][framing_type](*args) + else: + # General case: the io_dict just has the appropriate reader/writer + # for this state + return io_dict.get((role, state)) + + # This must be called after any action that might have caused + # self._cstate.states to change. + def _respond_to_state_changes(self, old_states, event=None): + # Update reader/writer + if self.our_state != old_states[self.our_role]: + self._writer = self._get_io_object(self.our_role, event, WRITERS) + if self.their_state != old_states[self.their_role]: + self._reader = self._get_io_object(self.their_role, event, READERS) + + @property + def trailing_data(self): + """Data that has been received, but not yet processed, represented as + a tuple with two elements, where the first is a byte-string containing + the unprocessed data itself, and the second is a bool that is True if + the receive connection was closed. + + See :ref:`switching-protocols` for discussion of why you'd want this. + """ + return (bytes(self._receive_buffer), self._receive_buffer_closed) + + def receive_data(self, data): + """Add data to our internal recieve buffer. + + This does not actually do any processing on the data, just stores + it. To trigger processing, you have to call :meth:`next_event`. + + Args: + data (:term:`bytes-like object`): + The new data that was just received. + + Special case: If *data* is an empty byte-string like ``b""``, + then this indicates that the remote side has closed the + connection (end of file). 
Normally this is convenient, because + standard Python APIs like :meth:`file.read` or + :meth:`socket.recv` use ``b""`` to indicate end-of-file, while + other failures to read are indicated using other mechanisms + like raising :exc:`TimeoutError`. When using such an API you + can just blindly pass through whatever you get from ``read`` + to :meth:`receive_data`, and everything will work. + + But, if you have an API where reading an empty string is a + valid non-EOF condition, then you need to be aware of this and + make sure to check for such strings and avoid passing them to + :meth:`receive_data`. + + Returns: + Nothing, but after calling this you should call :meth:`next_event` + to parse the newly received data. + + Raises: + RuntimeError: + Raised if you pass an empty *data*, indicating EOF, and then + pass a non-empty *data*, indicating more data that somehow + arrived after the EOF. + + (Calling ``receive_data(b"")`` multiple times is fine, + and equivalent to calling it once.) + + """ + if data: + if self._receive_buffer_closed: + raise RuntimeError("received close, then received more data?") + self._receive_buffer += data + else: + self._receive_buffer_closed = True + + def _extract_next_receive_event(self): + state = self.their_state + # We don't pause immediately when they enter DONE, because even in + # DONE state we can still process a ConnectionClosed() event. But + # if we have data in our buffer, then we definitely aren't getting + # a ConnectionClosed() immediately and we need to pause. + if state is DONE and self._receive_buffer: + return PAUSED + if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: + return PAUSED + assert self._reader is not None + event = self._reader(self._receive_buffer) + if event is None: + if not self._receive_buffer and self._receive_buffer_closed: + # In some unusual cases (basically just HTTP/1.0 bodies), EOF + # triggers an actual protocol event; in that case, we want to + # return that event, and then the state will change and we'll + # get called again to generate the actual ConnectionClosed(). + if hasattr(self._reader, "read_eof"): + event = self._reader.read_eof() + else: + event = ConnectionClosed() + if event is None: + event = NEED_DATA + return event + + def next_event(self): + """Parse the next event out of our receive buffer, update our internal + state, and return it. + + This is a mutating operation -- think of it like calling :func:`next` + on an iterator. + + Returns: + : One of three things: + + 1) An event object -- see :ref:`events`. + + 2) The special constant :data:`NEED_DATA`, which indicates that + you need to read more data from your socket and pass it to + :meth:`receive_data` before this method will be able to return + any more events. + + 3) The special constant :data:`PAUSED`, which indicates that we + are not in a state where we can process incoming data (usually + because the peer has finished their part of the current + request/response cycle, and you have not yet called + :meth:`start_next_cycle`). See :ref:`flow-control` for details. + + Raises: + RemoteProtocolError: + The peer has misbehaved. You should close the connection + (possibly after sending some kind of 4xx response). + + Once this method returns :class:`ConnectionClosed` once, then all + subsequent calls will also return :class:`ConnectionClosed`. + + If this method raises any exception besides :exc:`RemoteProtocolError` + then that's a bug -- if it happens please file a bug report! 
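A sketch of the read loop this docstring implies, assuming a blocking socket object named sock (hypothetical); it feeds bytes in until something other than NEED_DATA comes out.

import h11

def next_event_blocking(conn, sock):
    # Keep feeding bytes until the parser can emit a complete event.
    while True:
        event = conn.next_event()
        if event is h11.NEED_DATA:
            conn.receive_data(sock.recv(4096))  # b"" from recv() signals EOF to h11
            continue
        return event  # an event object, or h11.PAUSED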
+ + If this method raises any exception then it also sets + :attr:`Connection.their_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. + + """ + + if self.their_state is ERROR: + raise RemoteProtocolError("Can't receive data when peer state is ERROR") + try: + event = self._extract_next_receive_event() + if event not in [NEED_DATA, PAUSED]: + self._process_event(self.their_role, event) + self._receive_buffer.compress() + if event is NEED_DATA: + if len(self._receive_buffer) > self._max_incomplete_event_size: + # 431 is "Request header fields too large" which is pretty + # much the only situation where we can get here + raise RemoteProtocolError( + "Receive buffer too long", error_status_hint=431 + ) + if self._receive_buffer_closed: + # We're still trying to complete some event, but that's + # never going to happen because no more data is coming + raise RemoteProtocolError("peer unexpectedly closed connection") + return event + except BaseException as exc: + self._process_error(self.their_role) + if isinstance(exc, LocalProtocolError): + exc._reraise_as_remote_protocol_error() + else: + raise + + def send(self, event): + """Convert a high-level event into bytes that can be sent to the peer, + while updating our internal state machine. + + Args: + event: The :ref:`event ` to send. + + Returns: + If ``type(event) is ConnectionClosed``, then returns + ``None``. Otherwise, returns a :term:`bytes-like object`. + + Raises: + LocalProtocolError: + Sending this event at this time would violate our + understanding of the HTTP/1.1 protocol. + + If this method raises any exception then it also sets + :attr:`Connection.our_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. + + """ + data_list = self.send_with_data_passthrough(event) + if data_list is None: + return None + else: + return b"".join(data_list) + + def send_with_data_passthrough(self, event): + """Identical to :meth:`send`, except that in situations where + :meth:`send` returns a single :term:`bytes-like object`, this instead + returns a list of them -- and when sending a :class:`Data` event, this + list is guaranteed to contain the exact object you passed in as + :attr:`Data.data`. See :ref:`sendfile` for discussion. + + """ + if self.our_state is ERROR: + raise LocalProtocolError("Can't send data when our state is ERROR") + try: + if type(event) is Response: + self._clean_up_response_headers_for_sending(event) + # We want to call _process_event before calling the writer, + # because if someone tries to do something invalid then this will + # give a sensible error message, while our writers all just assume + # they will only receive valid events. But, _process_event might + # change self._writer. So we have to do a little dance: + writer = self._writer + self._process_event(self.our_role, event) + if type(event) is ConnectionClosed: + return None + else: + # In any situation where writer is None, process_event should + # have raised ProtocolError + assert writer is not None + data_list = [] + writer(event, data_list.append) + return data_list + except: + self._process_error(self.our_role) + raise + + def send_failed(self): + """Notify the state machine that we failed to send the data it gave + us. + + This causes :attr:`Connection.our_state` to immediately become + :data:`ERROR` -- see :ref:`error-handling` for discussion. + + """ + self._process_error(self.our_role) + + # When sending a Response, we take responsibility for a few things: + # + # - Sometimes you MUST set Connection: close. 
We take care of those + # times. (You can also set it yourself if you want, and if you do then + # we'll respect that and close the connection at the right time. But you + # don't have to worry about that unless you want to.) + # + # - The user has to set Content-Length if they want it. Otherwise, for + # responses that have bodies (e.g. not HEAD), then we will automatically + # select the right mechanism for streaming a body of unknown length, + # which depends on depending on the peer's HTTP version. + # + # This function's *only* responsibility is making sure headers are set up + # right -- everything downstream just looks at the headers. There are no + # side channels. It mutates the response event in-place (but not the + # response.headers list object). + def _clean_up_response_headers_for_sending(self, response): + assert type(response) is Response + + headers = list(response.headers) + need_close = False + + # HEAD requests need some special handling: they always act like they + # have Content-Length: 0, and that's how _body_framing treats + # them. But their headers are supposed to match what we would send if + # the request was a GET. (Technically there is one deviation allowed: + # we're allowed to leave out the framing headers -- see + # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as + # easy to get them right.) + method_for_choosing_headers = self._request_method + if method_for_choosing_headers == b"HEAD": + method_for_choosing_headers = b"GET" + framing_type, _ = _body_framing(method_for_choosing_headers, response) + if framing_type in ("chunked", "http/1.0"): + # This response has a body of unknown length. + # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked + # If our peer is HTTP/1.0, we use no framing headers, and close the + # connection afterwards. + # + # Make sure to clear Content-Length (in principle user could have + # set both and then we ignored Content-Length b/c + # Transfer-Encoding overwrote it -- this would be naughty of them, + # but the HTTP spec says that if our peer does this then we have + # to fix it instead of erroring out, so we'll accord the user the + # same respect). + set_comma_header(headers, b"content-length", []) + if self.their_http_version is None or self.their_http_version < b"1.1": + # Either we never got a valid request and are sending back an + # error (their_http_version is None), so we assume the worst; + # or else we did get a valid HTTP/1.0 request, so we know that + # they don't understand chunked encoding. + set_comma_header(headers, b"transfer-encoding", []) + # This is actually redundant ATM, since currently we + # unconditionally disable keep-alive when talking to HTTP/1.0 + # peers. But let's be defensive just in case we add + # Connection: keep-alive support later: + if self._request_method != b"HEAD": + need_close = True + else: + set_comma_header(headers, b"transfer-encoding", ["chunked"]) + + if not self._cstate.keep_alive or need_close: + # Make sure Connection: close is set + connection = set(get_comma_header(headers, b"connection")) + connection.discard(b"keep-alive") + connection.add(b"close") + set_comma_header(headers, b"connection", sorted(connection)) + + response.headers = headers diff --git a/py3/lib/python3.6/site-packages/h11/_events.py b/py3/lib/python3.6/site-packages/h11/_events.py new file mode 100644 index 0000000..c11d838 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/_events.py @@ -0,0 +1,305 @@ +# High level events that make up HTTP/1.1 conversations. 
Loosely inspired by +# the corresponding events in hyper-h2: +# +# http://python-hyper.org/h2/en/stable/api.html#events +# +# Don't subclass these. Stuff will break. + +import re + +from . import _headers +from ._abnf import request_target +from ._util import bytesify, LocalProtocolError, validate + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = [ + "Request", + "InformationalResponse", + "Response", + "Data", + "EndOfMessage", + "ConnectionClosed", +] + +request_target_re = re.compile(request_target.encode("ascii")) + + +class _EventBundle(object): + _fields = [] + _defaults = {} + + def __init__(self, **kwargs): + _parsed = kwargs.pop("_parsed", False) + allowed = set(self._fields) + for kwarg in kwargs: + if kwarg not in allowed: + raise TypeError( + "unrecognized kwarg {} for {}".format( + kwarg, self.__class__.__name__ + ) + ) + required = allowed.difference(self._defaults) + for field in required: + if field not in kwargs: + raise TypeError( + "missing required kwarg {} for {}".format( + field, self.__class__.__name__ + ) + ) + self.__dict__.update(self._defaults) + self.__dict__.update(kwargs) + + # Special handling for some fields + + if "headers" in self.__dict__: + self.headers = _headers.normalize_and_validate( + self.headers, _parsed=_parsed + ) + + if not _parsed: + for field in ["method", "target", "http_version", "reason"]: + if field in self.__dict__: + self.__dict__[field] = bytesify(self.__dict__[field]) + + if "status_code" in self.__dict__: + if not isinstance(self.status_code, int): + raise LocalProtocolError("status code must be integer") + # Because IntEnum objects are instances of int, but aren't + # duck-compatible (sigh), see gh-72. + self.status_code = int(self.status_code) + + self._validate() + + def _validate(self): + pass + + def __repr__(self): + name = self.__class__.__name__ + kwarg_strs = [ + "{}={}".format(field, self.__dict__[field]) for field in self._fields + ] + kwarg_str = ", ".join(kwarg_strs) + return "{}({})".format(name, kwarg_str) + + # Useful for tests + def __eq__(self, other): + return self.__class__ == other.__class__ and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not self.__eq__(other) + + # This is an unhashable type. + __hash__ = None + + +class Request(_EventBundle): + """The beginning of an HTTP request. + + Fields: + + .. attribute:: method + + An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: target + + The target of an HTTP request, e.g. ``b"/index.html"``, or one of the + more exotic formats described in `RFC 7320, section 5.3 + `_. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. 
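A short sketch of the normalization described above: ASCII native strings are converted to bytes, header names are lower-cased, and an HTTP/1.1 request without a Host header is rejected.

import h11

req = h11.Request(
    method="GET",                       # str is converted to b"GET"
    target="/index.html",
    headers=[("Host", "example.com")],  # names become lower-case bytes
)
assert req.method == b"GET"
assert (b"host", b"example.com") in req.headers

try:
    h11.Request(method="GET", target="/", headers=[])  # no Host on HTTP/1.1
except h11.LocalProtocolError:
    pass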
+ + """ + + _fields = ["method", "target", "headers", "http_version"] + _defaults = {"http_version": b"1.1"} + + def _validate(self): + # "A server MUST respond with a 400 (Bad Request) status code to any + # HTTP/1.1 request message that lacks a Host header field and to any + # request message that contains more than one Host header field or a + # Host header field with an invalid field-value." + # -- https://tools.ietf.org/html/rfc7230#section-5.4 + host_count = 0 + for name, value in self.headers: + if name == b"host": + host_count += 1 + if self.http_version == b"1.1" and host_count == 0: + raise LocalProtocolError("Missing mandatory Host: header") + if host_count > 1: + raise LocalProtocolError("Found multiple Host: headers") + + validate(request_target_re, self.target, "Illegal target characters") + + +class _ResponseBase(_EventBundle): + _fields = ["status_code", "headers", "http_version", "reason"] + _defaults = {"http_version": b"1.1", "reason": b""} + + +class InformationalResponse(_ResponseBase): + """An HTTP informational response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`InformationalResponse`, this is always in the range [100, + 200). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for + details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def _validate(self): + if not (100 <= self.status_code < 200): + raise LocalProtocolError( + "InformationalResponse status_code should be in range " + "[100, 200), not {}".format(self.status_code) + ) + + +class Response(_ResponseBase): + """The beginning of an HTTP response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`Response`, this is always in the range [200, + 600). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def _validate(self): + if not (200 <= self.status_code < 600): + raise LocalProtocolError( + "Response status_code should be in range [200, 600), not {}".format( + self.status_code + ) + ) + + +class Data(_EventBundle): + """Part of an HTTP message body. + + Fields: + + .. attribute:: data + + A :term:`bytes-like object` containing part of a message body. Or, if + using the ``combine=False`` argument to :meth:`Connection.send`, then + any object that your socket writing code knows what to do with, and for + which calling :func:`len` returns the number of bytes that will be + written -- see :ref:`sendfile` for details. + + .. attribute:: chunk_start + + A marker that indicates whether this data object is from the start of a + chunked transfer encoding chunk. This field is ignored when when a Data + event is provided to :meth:`Connection.send`: it is only valid on + events emitted from :meth:`Connection.next_event`. 
You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + .. attribute:: chunk_end + + A marker that indicates whether this data object is the last for a + given chunked transfer encoding chunk. This field is ignored when when + a Data event is provided to :meth:`Connection.send`: it is only valid + on events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + """ + + _fields = ["data", "chunk_start", "chunk_end"] + _defaults = {"chunk_start": False, "chunk_end": False} + + +# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that +# are forbidden to be sent in a trailer, since processing them as if they were +# present in the header section might bypass external security filters." +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part +# Unfortunately, the list of forbidden fields is long and vague :-/ +class EndOfMessage(_EventBundle): + """The end of an HTTP message. + + Fields: + + .. attribute:: headers + + Default value: ``[]`` + + Any trailing headers attached to this message, represented as a list of + (name, value) pairs. See :ref:`the header normalization rules + ` for details. + + Must be empty unless ``Transfer-Encoding: chunked`` is in use. + + """ + + _fields = ["headers"] + _defaults = {"headers": []} + + +class ConnectionClosed(_EventBundle): + """This event indicates that the sender has closed their outgoing + connection. + + Note that this does not necessarily mean that they can't *receive* further + data, because TCP connections are composed to two one-way channels which + can be closed independently. See :ref:`closing` for details. + + No fields. + """ + + pass diff --git a/py3/lib/python3.6/site-packages/h11/_headers.py b/py3/lib/python3.6/site-packages/h11/_headers.py new file mode 100644 index 0000000..878f63c --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/_headers.py @@ -0,0 +1,171 @@ +import re + +from ._abnf import field_name, field_value +from ._util import bytesify, LocalProtocolError, validate + +# Facts +# ----- +# +# Headers are: +# keys: case-insensitive ascii +# values: mixture of ascii and raw bytes +# +# "Historically, HTTP has allowed field content with text in the ISO-8859-1 +# charset [ISO-8859-1], supporting other charsets only through use of +# [RFC2047] encoding. In practice, most HTTP header field values use only a +# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD +# limit their field values to US-ASCII octets. A recipient SHOULD treat other +# octets in field content (obs-text) as opaque data." 
+# And it deprecates all non-ascii values +# +# Leading/trailing whitespace in header names is forbidden +# +# Values get leading/trailing whitespace stripped +# +# Content-Disposition actually needs to contain unicode semantically; to +# accomplish this it has a terrifically weird way of encoding the filename +# itself as ascii (and even this still has lots of cross-browser +# incompatibilities) +# +# Order is important: +# "a proxy MUST NOT change the order of these field values when forwarding a +# message" +# (and there are several headers where the order indicates a preference) +# +# Multiple occurences of the same header: +# "A sender MUST NOT generate multiple header fields with the same field name +# in a message unless either the entire field value for that header field is +# defined as a comma-separated list [or the header is Set-Cookie which gets a +# special exception]" - RFC 7230. (cookies are in RFC 6265) +# +# So every header aside from Set-Cookie can be merged by b", ".join if it +# occurs repeatedly. But, of course, they can't necessarily be split by +# .split(b","), because quoting. +# +# Given all this mess (case insensitive, duplicates allowed, order is +# important, ...), there doesn't appear to be any standard way to handle +# headers in Python -- they're almost like dicts, but... actually just +# aren't. For now we punt and just use a super simple representation: headers +# are a list of pairs +# +# [(name1, value1), (name2, value2), ...] +# +# where all entries are bytestrings, names are lowercase and have no +# leading/trailing whitespace, and values are bytestrings with no +# leading/trailing whitespace. Searching and updating are done via naive O(n) +# methods. +# +# Maybe a dict-of-lists would be better? + +_content_length_re = re.compile(br"[0-9]+") +_field_name_re = re.compile(field_name.encode("ascii")) +_field_value_re = re.compile(field_value.encode("ascii")) + + +def normalize_and_validate(headers, _parsed=False): + new_headers = [] + saw_content_length = False + saw_transfer_encoding = False + for name, value in headers: + # For headers coming out of the parser, we can safely skip some steps, + # because it always returns bytes and has already run these regexes + # over the data: + if not _parsed: + name = bytesify(name) + value = bytesify(value) + validate(_field_name_re, name, "Illegal header name {!r}", name) + validate(_field_value_re, value, "Illegal header value {!r}", value) + name = name.lower() + if name == b"content-length": + if saw_content_length: + raise LocalProtocolError("multiple Content-Length headers") + validate(_content_length_re, value, "bad Content-Length") + saw_content_length = True + if name == b"transfer-encoding": + # "A server that receives a request message with a transfer coding + # it does not understand SHOULD respond with 501 (Not + # Implemented)." + # https://tools.ietf.org/html/rfc7230#section-3.3.1 + if saw_transfer_encoding: + raise LocalProtocolError( + "multiple Transfer-Encoding headers", error_status_hint=501 + ) + # "All transfer-coding names are case-insensitive" + # -- https://tools.ietf.org/html/rfc7230#section-4 + value = value.lower() + if value != b"chunked": + raise LocalProtocolError( + "Only Transfer-Encoding: chunked is supported", + error_status_hint=501, + ) + saw_transfer_encoding = True + new_headers.append((name, value)) + return new_headers + + +def get_comma_header(headers, name): + # Should only be used for headers whose value is a list of + # comma-separated, case-insensitive values. 
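To make the intended use concrete, a sketch against these internal helpers (private API, so subject to change); the header values are arbitrary.

from h11._headers import get_comma_header, set_comma_header

headers = [(b"connection", b"keep-alive, Upgrade"), (b"host", b"example.com")]
# Values are split on commas, stripped, and lower-cased on the way out:
assert get_comma_header(headers, b"connection") == [b"keep-alive", b"upgrade"]

# Writing replaces every existing entry for that name:
set_comma_header(headers, b"connection", [b"close"])
assert headers == [(b"host", b"example.com"), (b"connection", b"close")]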
+ # + # The header name `name` is expected to be lower-case bytes. + # + # Connection: meets these criteria (including cast insensitivity). + # + # Content-Length: technically is just a single value (1*DIGIT), but the + # standard makes reference to implementations that do multiple values, and + # using this doesn't hurt. Ditto, case insensitivity doesn't things either + # way. + # + # Transfer-Encoding: is more complex (allows for quoted strings), so + # splitting on , is actually wrong. For example, this is legal: + # + # Transfer-Encoding: foo; options="1,2", chunked + # + # and should be parsed as + # + # foo; options="1,2" + # chunked + # + # but this naive function will parse it as + # + # foo; options="1 + # 2" + # chunked + # + # However, this is okay because the only thing we are going to do with + # any Transfer-Encoding is reject ones that aren't just "chunked", so + # both of these will be treated the same anyway. + # + # Expect: the only legal value is the literal string + # "100-continue". Splitting on commas is harmless. Case insensitive. + # + out = [] + for found_name, found_raw_value in headers: + if found_name == name: + found_raw_value = found_raw_value.lower() + for found_split_value in found_raw_value.split(b","): + found_split_value = found_split_value.strip() + if found_split_value: + out.append(found_split_value) + return out + + +def set_comma_header(headers, name, new_values): + # The header name `name` is expected to be lower-case bytes. + new_headers = [] + for found_name, found_raw_value in headers: + if found_name != name: + new_headers.append((found_name, found_raw_value)) + for new_value in new_values: + new_headers.append((name, new_value)) + headers[:] = normalize_and_validate(new_headers) + + +def has_expect_100_continue(request): + # https://tools.ietf.org/html/rfc7231#section-5.1.1 + # "A server that receives a 100-continue expectation in an HTTP/1.0 request + # MUST ignore that expectation." + if request.http_version < b"1.1": + return False + expect = get_comma_header(request.headers, b"expect") + return b"100-continue" in expect diff --git a/py3/lib/python3.6/site-packages/h11/_readers.py b/py3/lib/python3.6/site-packages/h11/_readers.py new file mode 100644 index 0000000..752cac0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/_readers.py @@ -0,0 +1,217 @@ +# Code to read HTTP data +# +# Strategy: each reader is a callable which takes a ReceiveBuffer object, and +# either: +# 1) consumes some of it and returns an Event +# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() +# and it might raise a LocalProtocolError, so simpler just to always use +# this) +# 3) returns None, meaning "I need more data" +# +# If they have a .read_eof attribute, then this will be called if an EOF is +# received -- but this is optional. Either way, the actual ConnectionClosed +# event will be generated afterwards. +# +# READERS is a dict describing how to pick a reader. It maps states to either: +# - a reader +# - or, for body readers, a dict of per-framing reader factories + +import re + +from ._abnf import chunk_header, header_field, request_line, status_line +from ._events import * +from ._state import * +from ._util import LocalProtocolError, RemoteProtocolError, validate + +__all__ = ["READERS"] + +header_field_re = re.compile(header_field.encode("ascii")) + +# Remember that this has to run in O(n) time -- so e.g. the bytearray cast is +# critical. 
+obs_fold_re = re.compile(br"[ \t]+") + + +def _obsolete_line_fold(lines): + it = iter(lines) + last = None + for line in it: + match = obs_fold_re.match(line) + if match: + if last is None: + raise LocalProtocolError("continuation line at start of headers") + if not isinstance(last, bytearray): + last = bytearray(last) + last += b" " + last += line[match.end() :] + else: + if last is not None: + yield last + last = line + if last is not None: + yield last + + +def _decode_header_lines(lines): + for line in _obsolete_line_fold(lines): + # _obsolete_line_fold yields either bytearray or bytes objects. On + # Python 3, validate() takes either and returns matches as bytes. But + # on Python 2, validate can return matches as bytearrays, so we have + # to explicitly cast back. + matches = validate(header_field_re, bytes(line)) + yield (matches["field_name"], matches["field_value"]) + + +request_line_re = re.compile(request_line.encode("ascii")) + + +def maybe_read_from_IDLE_client(buf): + lines = buf.maybe_extract_lines() + if lines is None: + return None + if not lines: + raise LocalProtocolError("no request line received") + matches = validate(request_line_re, lines[0]) + return Request( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +status_line_re = re.compile(status_line.encode("ascii")) + + +def maybe_read_from_SEND_RESPONSE_server(buf): + lines = buf.maybe_extract_lines() + if lines is None: + return None + if not lines: + raise LocalProtocolError("no response line received") + matches = validate(status_line_re, lines[0]) + # Tolerate missing reason phrases + if matches["reason"] is None: + matches["reason"] = b"" + status_code = matches["status_code"] = int(matches["status_code"]) + class_ = InformationalResponse if status_code < 200 else Response + return class_( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +class ContentLengthReader: + def __init__(self, length): + self._length = length + self._remaining = length + + def __call__(self, buf): + if self._remaining == 0: + return EndOfMessage() + data = buf.maybe_extract_at_most(self._remaining) + if data is None: + return None + self._remaining -= len(data) + return Data(data=data) + + def read_eof(self): + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(received {} bytes, expected {})".format( + self._length - self._remaining, self._length + ) + ) + + +chunk_header_re = re.compile(chunk_header.encode("ascii")) + + +class ChunkedReader(object): + def __init__(self): + self._bytes_in_chunk = 0 + # After reading a chunk, we have to throw away the trailing \r\n; if + # this is >0 then we discard that many bytes before resuming regular + # de-chunkification. 
+ self._bytes_to_discard = 0 + self._reading_trailer = False + + def __call__(self, buf): + if self._reading_trailer: + lines = buf.maybe_extract_lines() + if lines is None: + return None + return EndOfMessage(headers=list(_decode_header_lines(lines))) + if self._bytes_to_discard > 0: + data = buf.maybe_extract_at_most(self._bytes_to_discard) + if data is None: + return None + self._bytes_to_discard -= len(data) + if self._bytes_to_discard > 0: + return None + # else, fall through and read some more + assert self._bytes_to_discard == 0 + if self._bytes_in_chunk == 0: + # We need to refill our chunk count + chunk_header = buf.maybe_extract_until_next(b"\r\n") + if chunk_header is None: + return None + matches = validate(chunk_header_re, chunk_header) + # XX FIXME: we discard chunk extensions. Does anyone care? + # We convert to bytes because Python 2's `int()` function doesn't + # work properly on bytearray objects. + self._bytes_in_chunk = int(bytes(matches["chunk_size"]), base=16) + if self._bytes_in_chunk == 0: + self._reading_trailer = True + return self(buf) + chunk_start = True + else: + chunk_start = False + assert self._bytes_in_chunk > 0 + data = buf.maybe_extract_at_most(self._bytes_in_chunk) + if data is None: + return None + self._bytes_in_chunk -= len(data) + if self._bytes_in_chunk == 0: + self._bytes_to_discard = 2 + chunk_end = True + else: + chunk_end = False + return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) + + def read_eof(self): + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(incomplete chunked read)" + ) + + +class Http10Reader(object): + def __call__(self, buf): + data = buf.maybe_extract_at_most(999999999) + if data is None: + return None + return Data(data=data) + + def read_eof(self): + return EndOfMessage() + + +def expect_nothing(buf): + if buf: + raise LocalProtocolError("Got data when expecting EOF") + return None + + +READERS = { + (CLIENT, IDLE): maybe_read_from_IDLE_client, + (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, + (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, + (CLIENT, DONE): expect_nothing, + (CLIENT, MUST_CLOSE): expect_nothing, + (CLIENT, CLOSED): expect_nothing, + (SERVER, DONE): expect_nothing, + (SERVER, MUST_CLOSE): expect_nothing, + (SERVER, CLOSED): expect_nothing, + SEND_BODY: { + "chunked": ChunkedReader, + "content-length": ContentLengthReader, + "http/1.0": Http10Reader, + }, +} diff --git a/py3/lib/python3.6/site-packages/h11/_receivebuffer.py b/py3/lib/python3.6/site-packages/h11/_receivebuffer.py new file mode 100644 index 0000000..c56749a --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/_receivebuffer.py @@ -0,0 +1,112 @@ +import sys + +__all__ = ["ReceiveBuffer"] + + +# Operations we want to support: +# - find next \r\n or \r\n\r\n, or wait until there is one +# - read at-most-N bytes +# Goals: +# - on average, do this fast +# - worst case, do this in O(n) where n is the number of bytes processed +# Plan: +# - store bytearray, offset, how far we've searched for a separator token +# - use the how-far-we've-searched data to avoid rescanning +# - while doing a stream of uninterrupted processing, advance offset instead +# of constantly copying +# WARNING: +# - I haven't benchmarked or profiled any of this yet. 
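A sketch of the two operations listed above, driving the (internal) buffer directly with arbitrary sample bytes.

from h11._receivebuffer import ReceiveBuffer

buf = ReceiveBuffer()
buf += b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nhi"
# Lines up to and including the blank line are extracted together:
assert buf.maybe_extract_lines() == [b"HTTP/1.1 200 OK", b"Content-Length: 2"]
assert buf.maybe_extract_at_most(10) == b"hi"
assert buf.maybe_extract_at_most(10) is None  # empty buffer means "need more data"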
+# +# Note that starting in Python 3.4, deleting the initial n bytes from a +# bytearray is amortized O(n), thanks to some excellent work by Antoine +# Martin: +# +# https://bugs.python.org/issue19087 +# +# This means that if we only supported 3.4+, we could get rid of the code here +# involving self._start and self.compress, because it's doing exactly the same +# thing that bytearray now does internally. +# +# BUT unfortunately, we still support 2.7, and reading short segments out of a +# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually +# delete this code. Yet: +# +# https://pythonclock.org/ +# +# (Two things to double-check first though: make sure PyPy also has the +# optimization, and benchmark to make sure it's a win, since we do have a +# slightly clever thing where we delay calling compress() until we've +# processed a whole event, which could in theory be slightly more efficient +# than the internal bytearray support.) +class ReceiveBuffer(object): + def __init__(self): + self._data = bytearray() + # These are both absolute offsets into self._data: + self._start = 0 + self._looked_at = 0 + self._looked_for = b"" + + def __bool__(self): + return bool(len(self)) + + # for @property unprocessed_data + def __bytes__(self): + return bytes(self._data[self._start :]) + + if sys.version_info[0] < 3: # version specific: Python 2 + __str__ = __bytes__ + __nonzero__ = __bool__ + + def __len__(self): + return len(self._data) - self._start + + def compress(self): + # Heuristic: only compress if it lets us reduce size by a factor + # of 2 + if self._start > len(self._data) // 2: + del self._data[: self._start] + self._looked_at -= self._start + self._start -= self._start + + def __iadd__(self, byteslike): + self._data += byteslike + return self + + def maybe_extract_at_most(self, count): + out = self._data[self._start : self._start + count] + if not out: + return None + self._start += len(out) + return out + + def maybe_extract_until_next(self, needle): + # Returns extracted bytes on success (advancing offset), or None on + # failure + if self._looked_for == needle: + search_start = max(self._start, self._looked_at - len(needle) + 1) + else: + search_start = self._start + offset = self._data.find(needle, search_start) + if offset == -1: + self._looked_at = len(self._data) + self._looked_for = needle + return None + new_start = offset + len(needle) + out = self._data[self._start : new_start] + self._start = new_start + return out + + # HTTP/1.1 has a number of constructs where you keep reading lines until + # you see a blank one. This does that, and then returns the lines. + def maybe_extract_lines(self): + if self._data[self._start : self._start + 2] == b"\r\n": + self._start += 2 + return [] + else: + data = self.maybe_extract_until_next(b"\r\n\r\n") + if data is None: + return None + lines = data.split(b"\r\n") + assert lines[-2] == lines[-1] == b"" + del lines[-2:] + return lines diff --git a/py3/lib/python3.6/site-packages/h11/_state.py b/py3/lib/python3.6/site-packages/h11/_state.py new file mode 100644 index 0000000..70a5e04 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/_state.py @@ -0,0 +1,307 @@ +################################################################ +# The core state machine +################################################################ +# +# Rule 1: everything that affects the state machine and state transitions must +# live here in this file. 
As much as possible goes into the table-based +# representation, but for the bits that don't quite fit, the actual code and +# state must nonetheless live here. +# +# Rule 2: this file does not know about what role we're playing; it only knows +# about HTTP request/response cycles in the abstract. This ensures that we +# don't cheat and apply different rules to local and remote parties. +# +# +# Theory of operation +# =================== +# +# Possibly the simplest way to think about this is that we actually have 5 +# different state machines here. Yes, 5. These are: +# +# 1) The client state, with its complicated automaton (see the docs) +# 2) The server state, with its complicated automaton (see the docs) +# 3) The keep-alive state, with possible states {True, False} +# 4) The SWITCH_CONNECT state, with possible states {False, True} +# 5) The SWITCH_UPGRADE state, with possible states {False, True} +# +# For (3)-(5), the first state listed is the initial state. +# +# (1)-(3) are stored explicitly in member variables. The last +# two are stored implicitly in the pending_switch_proposals set as: +# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals) +# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals) +# +# And each of these machines has two different kinds of transitions: +# +# a) Event-triggered +# b) State-triggered +# +# Event triggered is the obvious thing that you'd think it is: some event +# happens, and if it's the right event at the right time then a transition +# happens. But there are somewhat complicated rules for which machines can +# "see" which events. (As a rule of thumb, if a machine "sees" an event, this +# means two things: the event can affect the machine, and if the machine is +# not in a state where it expects that event then it's an error.) These rules +# are: +# +# 1) The client machine sees all h11.events objects emitted by the client. +# +# 2) The server machine sees all h11.events objects emitted by the server. +# +# It also sees the client's Request event. +# +# And sometimes, server events are annotated with a _SWITCH_* event. For +# example, we can have a (Response, _SWITCH_CONNECT) event, which is +# different from a regular Response event. +# +# 3) The keep-alive machine sees the process_keep_alive_disabled() event +# (which is derived from Request/Response events), and this event +# transitions it from True -> False, or from False -> False. There's no way +# to transition back. +# +# 4&5) The _SWITCH_* machines transition from False->True when we get a +# Request that proposes the relevant type of switch (via +# process_client_switch_proposals), and they go from True->False when we +# get a Response that has no _SWITCH_* annotation. +# +# So that's event-triggered transitions. +# +# State-triggered transitions are less standard. What they do here is couple +# the machines together. The way this works is, when certain *joint* +# configurations of states are achieved, then we automatically transition to a +# new *joint* state. So, for example, if we're ever in a joint state with +# +# client: DONE +# keep-alive: False +# +# then the client state immediately transitions to: +# +# client: MUST_CLOSE +# +# This is fundamentally different from an event-based transition, because it +# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state +# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive +# transitioned True -> False. 
Either way, once this precondition is satisfied, +# this transition is immediately triggered. +# +# What if two conflicting state-based transitions get enabled at the same +# time? In practice there's only one case where this arises (client DONE -> +# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by +# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. +# +# Implementation +# -------------- +# +# The event-triggered transitions for the server and client machines are all +# stored explicitly in a table. Ditto for the state-triggered transitions that +# involve just the server and client state. +# +# The transitions for the other machines, and the state-triggered transitions +# that involve the other machines, are written out as explicit Python code. +# +# It'd be nice if there were some cleaner way to do all this. This isn't +# *too* terrible, but I feel like it could probably be better. +# +# WARNING +# ------- +# +# The script that generates the state machine diagrams for the docs knows how +# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS +# tables. But it can't automatically read the transitions that are written +# directly in Python code. So if you touch those, you need to also update the +# script to keep it in sync! + +from ._events import * +from ._util import LocalProtocolError, make_sentinel + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = [ + "CLIENT", + "SERVER", + "IDLE", + "SEND_RESPONSE", + "SEND_BODY", + "DONE", + "MUST_CLOSE", + "CLOSED", + "MIGHT_SWITCH_PROTOCOL", + "SWITCHED_PROTOCOL", + "ERROR", +] + +CLIENT = make_sentinel("CLIENT") +SERVER = make_sentinel("SERVER") + +# States +IDLE = make_sentinel("IDLE") +SEND_RESPONSE = make_sentinel("SEND_RESPONSE") +SEND_BODY = make_sentinel("SEND_BODY") +DONE = make_sentinel("DONE") +MUST_CLOSE = make_sentinel("MUST_CLOSE") +CLOSED = make_sentinel("CLOSED") +ERROR = make_sentinel("ERROR") + +# Switch types +MIGHT_SWITCH_PROTOCOL = make_sentinel("MIGHT_SWITCH_PROTOCOL") +SWITCHED_PROTOCOL = make_sentinel("SWITCHED_PROTOCOL") + +_SWITCH_UPGRADE = make_sentinel("_SWITCH_UPGRADE") +_SWITCH_CONNECT = make_sentinel("_SWITCH_CONNECT") + +EVENT_TRIGGERED_TRANSITIONS = { + CLIENT: { + IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + MIGHT_SWITCH_PROTOCOL: {}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, + SERVER: { + IDLE: { + ConnectionClosed: CLOSED, + Response: SEND_BODY, + # Special case: server sees client Request events, in this form + (Request, CLIENT): SEND_RESPONSE, + }, + SEND_RESPONSE: { + InformationalResponse: SEND_RESPONSE, + Response: SEND_BODY, + (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, + (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, + }, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, +} + +# NB: there are also some special-case state-triggered transitions hard-coded +# into _fire_state_triggered_transitions below. 
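To make the tables concrete, a sketch of one plain request/response cycle pushed through the (internal) state machine; note that event types, not event instances, are what get passed in.

from h11._events import EndOfMessage, Request, Response
from h11._state import CLIENT, DONE, IDLE, SERVER, ConnectionState

cs = ConnectionState()
cs.process_event(CLIENT, Request)       # client IDLE -> SEND_BODY; server IDLE -> SEND_RESPONSE
cs.process_event(CLIENT, EndOfMessage)  # client SEND_BODY -> DONE
cs.process_event(SERVER, Response)      # server SEND_RESPONSE -> SEND_BODY
cs.process_event(SERVER, EndOfMessage)  # server SEND_BODY -> DONE
assert cs.states == {CLIENT: DONE, SERVER: DONE}
cs.start_next_cycle()                   # keep-alive: both roles return to IDLE
assert cs.states == {CLIENT: IDLE, SERVER: IDLE}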
+STATE_TRIGGERED_TRANSITIONS = { + # (Client state, Server state) -> new states + # Protocol negotiation + (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, + # Socket shutdown + (CLOSED, DONE): {SERVER: MUST_CLOSE}, + (CLOSED, IDLE): {SERVER: MUST_CLOSE}, + (ERROR, DONE): {SERVER: MUST_CLOSE}, + (DONE, CLOSED): {CLIENT: MUST_CLOSE}, + (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, + (DONE, ERROR): {CLIENT: MUST_CLOSE}, +} + + +class ConnectionState(object): + def __init__(self): + # Extra bits of state that don't quite fit into the state model. + + # If this is False then it enables the automatic DONE -> MUST_CLOSE + # transition. Don't set this directly; call .keep_alive_disabled() + self.keep_alive = True + + # This is a subset of {UPGRADE, CONNECT}, containing the proposals + # made by the client for switching protocols. + self.pending_switch_proposals = set() + + self.states = {CLIENT: IDLE, SERVER: IDLE} + + def process_error(self, role): + self.states[role] = ERROR + self._fire_state_triggered_transitions() + + def process_keep_alive_disabled(self): + self.keep_alive = False + self._fire_state_triggered_transitions() + + def process_client_switch_proposal(self, switch_event): + self.pending_switch_proposals.add(switch_event) + self._fire_state_triggered_transitions() + + def process_event(self, role, event_type, server_switch_event=None): + if server_switch_event is not None: + assert role is SERVER + if server_switch_event not in self.pending_switch_proposals: + raise LocalProtocolError( + "Received server {} event without a pending proposal".format( + server_switch_event + ) + ) + event_type = (event_type, server_switch_event) + if server_switch_event is None and event_type is Response: + self.pending_switch_proposals = set() + self._fire_event_triggered_transitions(role, event_type) + # Special case: the server state does get to see Request + # events. + if event_type is Request: + assert role is CLIENT + self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) + self._fire_state_triggered_transitions() + + def _fire_event_triggered_transitions(self, role, event_type): + state = self.states[role] + try: + new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] + except KeyError: + raise LocalProtocolError( + "can't handle event type {} when role={} and state={}".format( + event_type.__name__, role, self.states[role] + ) + ) + self.states[role] = new_state + + def _fire_state_triggered_transitions(self): + # We apply these rules repeatedly until converging on a fixed point + while True: + start_states = dict(self.states) + + # It could happen that both these special-case transitions are + # enabled at the same time: + # + # DONE -> MIGHT_SWITCH_PROTOCOL + # DONE -> MUST_CLOSE + # + # For example, this will always be true of a HTTP/1.0 client + # requesting CONNECT. If this happens, the protocol switch takes + # priority. From there the client will either go to + # SWITCHED_PROTOCOL, in which case it's none of our business when + # they close the connection, or else the server will deny the + # request, in which case the client will go back to DONE and then + # from there to MUST_CLOSE. 
+ if self.pending_switch_proposals: + if self.states[CLIENT] is DONE: + self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL + + if not self.pending_switch_proposals: + if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: + self.states[CLIENT] = DONE + + if not self.keep_alive: + for role in (CLIENT, SERVER): + if self.states[role] is DONE: + self.states[role] = MUST_CLOSE + + # Tabular state-triggered transitions + joint_state = (self.states[CLIENT], self.states[SERVER]) + changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) + self.states.update(changes) + + if self.states == start_states: + # Fixed point reached + return + + def start_next_cycle(self): + if self.states != {CLIENT: DONE, SERVER: DONE}: + raise LocalProtocolError( + "not in a reusable state. self.states={}".format(self.states) + ) + # Can't reach DONE/DONE with any of these active, but still, let's be + # sure. + assert self.keep_alive + assert not self.pending_switch_proposals + self.states = {CLIENT: IDLE, SERVER: IDLE} diff --git a/py3/lib/python3.6/site-packages/h11/_util.py b/py3/lib/python3.6/site-packages/h11/_util.py new file mode 100644 index 0000000..0a2c28e --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/_util.py @@ -0,0 +1,142 @@ +import re +import sys + +__all__ = [ + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "validate", + "make_sentinel", + "bytesify", +] + + +class ProtocolError(Exception): + """Exception indicating a violation of the HTTP/1.1 protocol. + + This as an abstract base class, with two concrete base classes: + :exc:`LocalProtocolError`, which indicates that you tried to do something + that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which + indicates that the remote peer tried to do something that HTTP/1.1 says is + illegal. See :ref:`error-handling` for details. + + In addition to the normal :exc:`Exception` features, it has one attribute: + + .. attribute:: error_status_hint + + This gives a suggestion as to what status code a server might use if + this error occurred as part of a request. + + For a :exc:`RemoteProtocolError`, this is useful as a suggestion for + how you might want to respond to a misbehaving peer, if you're + implementing a server. + + For a :exc:`LocalProtocolError`, this can be taken as a suggestion for + how your peer might have responded to *you* if h11 had allowed you to + continue. + + The default is 400 Bad Request, a generic catch-all for protocol + violations. + + """ + + def __init__(self, msg, error_status_hint=400): + if type(self) is ProtocolError: + raise TypeError("tried to directly instantiate ProtocolError") + Exception.__init__(self, msg) + self.error_status_hint = error_status_hint + + +# Strategy: there are a number of public APIs where a LocalProtocolError can +# be raised (send(), all the different event constructors, ...), and only one +# public API where RemoteProtocolError can be raised +# (receive_data()). Therefore we always raise LocalProtocolError internally, +# and then receive_data will translate this into a RemoteProtocolError. +# +# Internally: +# LocalProtocolError is the generic "ProtocolError". +# Externally: +# LocalProtocolError is for local errors and RemoteProtocolError is for +# remote errors. +class LocalProtocolError(ProtocolError): + def _reraise_as_remote_protocol_error(self): + # After catching a LocalProtocolError, use this method to re-raise it + # as a RemoteProtocolError. This method must be called from inside an + # except: block. 
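A sketch of this split as seen by a caller: local misuse raises LocalProtocolError, and the default error_status_hint is 400.

import h11

conn = h11.Connection(our_role=h11.CLIENT)
try:
    conn.send(h11.Response(status_code=200, headers=[]))  # a client cannot send a Response
except h11.LocalProtocolError as exc:
    assert exc.error_status_hint == 400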
+ # + # An easy way to get an equivalent RemoteProtocolError is just to + # modify 'self' in place. + self.__class__ = RemoteProtocolError + # But the re-raising is somewhat non-trivial -- you might think that + # now that we've modified the in-flight exception object, that just + # doing 'raise' to re-raise it would be enough. But it turns out that + # this doesn't work, because Python tracks the exception type + # (exc_info[0]) separately from the exception object (exc_info[1]), + # and we only modified the latter. So we really do need to re-raise + # the new type explicitly. + if sys.version_info[0] >= 3: + # On py3, the traceback is part of the exception object, so our + # in-place modification preserved it and we can just re-raise: + raise self + else: + # On py2, preserving the traceback requires 3-argument + # raise... but on py3 this is a syntax error, so we have to hide + # it inside an exec + exec("raise RemoteProtocolError, self, sys.exc_info()[2]") + + +class RemoteProtocolError(ProtocolError): + pass + + +try: + _fullmatch = type(re.compile("")).fullmatch +except AttributeError: + + def _fullmatch(regex, data): # version specific: Python < 3.4 + match = regex.match(data) + if match and match.end() != len(data): + match = None + return match + + +def validate(regex, data, msg="malformed data", *format_args): + match = _fullmatch(regex, data) + if not match: + if format_args: + msg = msg.format(*format_args) + raise LocalProtocolError(msg) + return match.groupdict() + + +# Sentinel values +# +# - Inherit identity-based comparison and hashing from object +# - Have a nice repr +# - Have a *bonus property*: type(sentinel) is sentinel +# +# The bonus property is useful if you want to take the return value from +# next_event() and do some sort of dispatch based on type(event). +class _SentinelBase(type): + def __repr__(self): + return self.__name__ + + +def make_sentinel(name): + cls = _SentinelBase(name, (_SentinelBase,), {}) + cls.__class__ = cls + return cls + + +# Used for methods, request targets, HTTP versions, header names, and header +# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always +# returns bytes. +def bytesify(s): + # Fast-path: + if type(s) is bytes: + return s + if isinstance(s, str): + s = s.encode("ascii") + if isinstance(s, int): + raise TypeError("expected bytes-like object, not int") + return bytes(s) diff --git a/py3/lib/python3.6/site-packages/h11/_version.py b/py3/lib/python3.6/site-packages/h11/_version.py new file mode 100644 index 0000000..2944df8 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/_version.py @@ -0,0 +1,16 @@ +# This file must be kept very simple, because it is consumed from several +# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc. + +# We use a simple scheme: +# 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev +# where the +dev versions are never released into the wild, they're just what +# we stick into the VCS in between releases. +# +# This is compatible with PEP 440: +# http://legacy.python.org/dev/peps/pep-0440/ +# via the use of the "local suffix" "+dev", which is disallowed on index +# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we +# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before* +# 1.0.0.) 
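(Aside, not part of the committed file: the sorting behaviour described in the comment above can be spot-checked with the third-party packaging library, assuming it is installed.)

    from packaging.version import Version

    # A "+dev" local suffix sorts after the plain release...
    assert Version("1.0.0+dev") > Version("1.0.0")
    # ...while a ".dev" pre-release suffix sorts before it.
    assert Version("1.0.0.dev0") < Version("1.0.0")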
+ +__version__ = "0.9.0" diff --git a/py3/lib/python3.6/site-packages/h11/_writers.py b/py3/lib/python3.6/site-packages/h11/_writers.py new file mode 100644 index 0000000..6a41100 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/_writers.py @@ -0,0 +1,142 @@ +# Code to read HTTP data +# +# Strategy: each writer takes an event + a write-some-bytes function, which is +# calls. +# +# WRITERS is a dict describing how to pick a reader. It maps states to either: +# - a writer +# - or, for body writers, a dict of framin-dependent writer factories + +import sys + +from ._events import Data, EndOfMessage +from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER +from ._util import LocalProtocolError + +__all__ = ["WRITERS"] + +# Equivalent of bstr % values, that works on python 3.x for x < 5 +if (3, 0) <= sys.version_info < (3, 5): + + def bytesmod(bstr, values): + decoded_values = [] + for value in values: + if isinstance(value, bytes): + decoded_values.append(value.decode("ascii")) + else: + decoded_values.append(value) + return (bstr.decode("ascii") % tuple(decoded_values)).encode("ascii") + + +else: + + def bytesmod(bstr, values): + return bstr % values + + +def write_headers(headers, write): + # "Since the Host field-value is critical information for handling a + # request, a user agent SHOULD generate Host as the first header field + # following the request-line." - RFC 7230 + for name, value in headers: + if name == b"host": + write(bytesmod(b"%s: %s\r\n", (name, value))) + for name, value in headers: + if name != b"host": + write(bytesmod(b"%s: %s\r\n", (name, value))) + write(b"\r\n") + + +def write_request(request, write): + if request.http_version != b"1.1": + raise LocalProtocolError("I only send HTTP/1.1") + write(bytesmod(b"%s %s HTTP/1.1\r\n", (request.method, request.target))) + write_headers(request.headers, write) + + +# Shared between InformationalResponse and Response +def write_any_response(response, write): + if response.http_version != b"1.1": + raise LocalProtocolError("I only send HTTP/1.1") + status_bytes = str(response.status_code).encode("ascii") + # We don't bother sending ascii status messages like "OK"; they're + # optional and ignored by the protocol. (But the space after the numeric + # status code is mandatory.) + # + # XX FIXME: could at least make an effort to pull out the status message + # from stdlib's http.HTTPStatus table. Or maybe just steal their enums + # (either by import or copy/paste). We already accept them as status codes + # since they're of type IntEnum < int. + write(bytesmod(b"HTTP/1.1 %s %s\r\n", (status_bytes, response.reason))) + write_headers(response.headers, write) + + +class BodyWriter(object): + def __call__(self, event, write): + if type(event) is Data: + self.send_data(event.data, write) + elif type(event) is EndOfMessage: + self.send_eom(event.headers, write) + else: # pragma: no cover + assert False + + +# +# These are all careful not to do anything to 'data' except call len(data) and +# write(data). This allows us to transparently pass-through funny objects, +# like placeholder objects referring to files on disk that will be sent via +# sendfile(2). 
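(Aside, not part of the committed file: a minimal sketch of driving the helpers above by hand, using a plain list.append as the write callback; it assumes only the h11 modules added in this diff.)

    from h11._events import Data, EndOfMessage
    from h11._writers import ChunkedWriter, write_headers

    # write_headers emits the Host field first, per the RFC 7230 note above.
    lines = []
    write_headers([(b"accept", b"*/*"), (b"host", b"example.com")], lines.append)
    assert b"".join(lines) == b"host: example.com\r\naccept: */*\r\n\r\n"

    # A body writer is called once per event with the same write callback.
    chunks = []
    w = ChunkedWriter()
    w(Data(data=b"hello"), chunks.append)      # 5-byte chunk
    w(EndOfMessage(), chunks.append)           # terminating 0-length chunk
    assert b"".join(chunks) == b"5\r\nhello\r\n0\r\n\r\n"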
+# +class ContentLengthWriter(BodyWriter): + def __init__(self, length): + self._length = length + + def send_data(self, data, write): + self._length -= len(data) + if self._length < 0: + raise LocalProtocolError("Too much data for declared Content-Length") + write(data) + + def send_eom(self, headers, write): + if self._length != 0: + raise LocalProtocolError("Too little data for declared Content-Length") + if headers: + raise LocalProtocolError("Content-Length and trailers don't mix") + + +class ChunkedWriter(BodyWriter): + def send_data(self, data, write): + # if we encoded 0-length data in the naive way, it would look like an + # end-of-message. + if not data: + return + write(bytesmod(b"%x\r\n", (len(data),))) + write(data) + write(b"\r\n") + + def send_eom(self, headers, write): + write(b"0\r\n") + write_headers(headers, write) + + +class Http10Writer(BodyWriter): + def send_data(self, data, write): + write(data) + + def send_eom(self, headers, write): + if headers: + raise LocalProtocolError("can't send trailers to HTTP/1.0 client") + # no need to close the socket ourselves, that will be taken care of by + # Connection: close machinery + + +WRITERS = { + (CLIENT, IDLE): write_request, + (SERVER, IDLE): write_any_response, + (SERVER, SEND_RESPONSE): write_any_response, + SEND_BODY: { + "chunked": ChunkedWriter, + "content-length": ContentLengthWriter, + "http/1.0": Http10Writer, + }, +} diff --git a/py3/lib/python3.6/site-packages/h11/tests/__init__.py b/py3/lib/python3.6/site-packages/h11/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/py3/lib/python3.6/site-packages/h11/tests/data/test-file b/py3/lib/python3.6/site-packages/h11/tests/data/test-file new file mode 100644 index 0000000..d0be0a6 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/tests/data/test-file @@ -0,0 +1 @@ +92b12bc045050b55b848d37167a1a63947c364579889ce1d39788e45e9fac9e5 diff --git a/py3/lib/python3.6/site-packages/h11/tests/helpers.py b/py3/lib/python3.6/site-packages/h11/tests/helpers.py new file mode 100644 index 0000000..9d2cf38 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/tests/helpers.py @@ -0,0 +1,77 @@ +from .._connection import * +from .._events import * +from .._state import * + + +def get_all_events(conn): + got_events = [] + while True: + event = conn.next_event() + if event in (NEED_DATA, PAUSED): + break + got_events.append(event) + if type(event) is ConnectionClosed: + break + return got_events + + +def receive_and_get(conn, data): + conn.receive_data(data) + return get_all_events(conn) + + +# Merges adjacent Data events, converts payloads to bytestrings, and removes +# chunk boundaries. +def normalize_data_events(in_events): + out_events = [] + for event in in_events: + if type(event) is Data: + event.data = bytes(event.data) + event.chunk_start = False + event.chunk_end = False + if out_events and type(out_events[-1]) is type(event) is Data: + out_events[-1].data += event.data + else: + out_events.append(event) + return out_events + + +# Given that we want to write tests that push some events through a Connection +# and check that its state updates appropriately... we might as make a habit +# of pushing them through two Connections with a fake network link in +# between. +class ConnectionPair: + def __init__(self): + self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)} + self.other = {CLIENT: SERVER, SERVER: CLIENT} + + @property + def conns(self): + return self.conn.values() + + # expect="match" if expect=send_events; expect=[...] 
to say what expected + def send(self, role, send_events, expect="match"): + if not isinstance(send_events, list): + send_events = [send_events] + data = b"" + closed = False + for send_event in send_events: + new_data = self.conn[role].send(send_event) + if new_data is None: + closed = True + else: + data += new_data + # send uses b"" to mean b"", and None to mean closed + # receive uses b"" to mean closed, and None to mean "try again" + # so we have to translate between the two conventions + if data: + self.conn[self.other[role]].receive_data(data) + if closed: + self.conn[self.other[role]].receive_data(b"") + got_events = get_all_events(self.conn[self.other[role]]) + if expect == "match": + expect = send_events + if not isinstance(expect, list): + expect = [expect] + assert got_events == expect + return data diff --git a/py3/lib/python3.6/site-packages/h11/tests/test_against_stdlib_http.py b/py3/lib/python3.6/site-packages/h11/tests/test_against_stdlib_http.py new file mode 100644 index 0000000..b4219ff --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/tests/test_against_stdlib_http.py @@ -0,0 +1,123 @@ +import json +import os.path +import socket +import threading +from contextlib import closing, contextmanager + +import h11 + +try: + from urllib.request import urlopen +except ImportError: # version specific: Python 2 + from urllib2 import urlopen + +try: + import socketserver +except ImportError: # version specific: Python 2 + import SocketServer as socketserver + +try: + from http.server import SimpleHTTPRequestHandler +except ImportError: # version specific: Python 2 + from SimpleHTTPServer import SimpleHTTPRequestHandler + + +@contextmanager +def socket_server(handler): + httpd = socketserver.TCPServer(("127.0.0.1", 0), handler) + thread = threading.Thread( + target=httpd.serve_forever, kwargs={"poll_interval": 0.01} + ) + thread.daemon = True + try: + thread.start() + yield httpd + finally: + httpd.shutdown() + + +test_file_path = os.path.join(os.path.dirname(__file__), "data/test-file") +with open(test_file_path, "rb") as f: + test_file_data = f.read() + + +class SingleMindedRequestHandler(SimpleHTTPRequestHandler): + def translate_path(self, path): + return test_file_path + + +def test_h11_as_client(): + with socket_server(SingleMindedRequestHandler) as httpd: + with closing(socket.create_connection(httpd.server_address)) as s: + c = h11.Connection(h11.CLIENT) + + s.sendall( + c.send( + h11.Request( + method="GET", target="/foo", headers=[("Host", "localhost")] + ) + ) + ) + s.sendall(c.send(h11.EndOfMessage())) + + data = bytearray() + while True: + event = c.next_event() + print(event) + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Response: + assert event.status_code == 200 + if type(event) is h11.Data: + data += event.data + if type(event) is h11.EndOfMessage: + break + assert bytes(data) == test_file_data + + +class H11RequestHandler(socketserver.BaseRequestHandler): + def handle(self): + with closing(self.request) as s: + c = h11.Connection(h11.SERVER) + request = None + while True: + event = c.next_event() + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Request: + request = event + if type(event) is h11.EndOfMessage: + break + info = json.dumps( + { + "method": request.method.decode("ascii"), + 
"target": request.target.decode("ascii"), + "headers": { + name.decode("ascii"): value.decode("ascii") + for (name, value) in request.headers + }, + } + ) + s.sendall(c.send(h11.Response(status_code=200, headers=[]))) + s.sendall(c.send(h11.Data(data=info.encode("ascii")))) + s.sendall(c.send(h11.EndOfMessage())) + + +def test_h11_as_server(): + with socket_server(H11RequestHandler) as httpd: + host, port = httpd.server_address + url = "http://{}:{}/some-path".format(host, port) + with closing(urlopen(url)) as f: + assert f.getcode() == 200 + data = f.read() + info = json.loads(data.decode("ascii")) + print(info) + assert info["method"] == "GET" + assert info["target"] == "/some-path" + assert "urllib" in info["headers"]["user-agent"] diff --git a/py3/lib/python3.6/site-packages/h11/tests/test_connection.py b/py3/lib/python3.6/site-packages/h11/tests/test_connection.py new file mode 100644 index 0000000..13e6e2d --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/tests/test_connection.py @@ -0,0 +1,1046 @@ +import pytest + +from .._connection import _body_framing, _keep_alive, Connection, NEED_DATA, PAUSED +from .._events import * +from .._state import * +from .._util import LocalProtocolError, RemoteProtocolError +from .helpers import ConnectionPair, get_all_events, receive_and_get + + +def test__keep_alive(): + assert _keep_alive( + Request(method="GET", target="/", headers=[("Host", "Example.com")]) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "close")], + ) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "a, b, cLOse, foo")], + ) + ) + assert not _keep_alive( + Request(method="GET", target="/", headers=[], http_version="1.0") + ) + + assert _keep_alive(Response(status_code=200, headers=[])) + assert not _keep_alive(Response(status_code=200, headers=[("Connection", "close")])) + assert not _keep_alive( + Response(status_code=200, headers=[("Connection", "a, b, cLOse, foo")]) + ) + assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) + + +def test__body_framing(): + def headers(cl, te): + headers = [] + if cl is not None: + headers.append(("Content-Length", str(cl))) + if te: + headers.append(("Transfer-Encoding", "chunked")) + return headers + + def resp(status_code=200, cl=None, te=False): + return Response(status_code=status_code, headers=headers(cl, te)) + + def req(cl=None, te=False): + h = headers(cl, te) + h += [("Host", "example.com")] + return Request(method="GET", target="/", headers=h) + + # Special cases where the headers are ignored: + for kwargs in [{}, {"cl": 100}, {"te": True}, {"cl": 100, "te": True}]: + for meth, r in [ + (b"HEAD", resp(**kwargs)), + (b"GET", resp(status_code=204, **kwargs)), + (b"GET", resp(status_code=304, **kwargs)), + ]: + assert _body_framing(meth, r) == ("content-length", (0,)) + + # Transfer-encoding + for kwargs in [{"te": True}, {"cl": 100, "te": True}]: + for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: + assert _body_framing(meth, r) == ("chunked", ()) + + # Content-Length + for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: + assert _body_framing(meth, r) == ("content-length", (100,)) + + # No headers + assert _body_framing(None, req()) == ("content-length", (0,)) + assert _body_framing(b"GET", resp()) == ("http/1.0", ()) + + +def test_Connection_basics_and_content_length(): + with pytest.raises(ValueError): + Connection("CLIENT") + 
+ p = ConnectionPair() + assert p.conn[CLIENT].our_role is CLIENT + assert p.conn[CLIENT].their_role is SERVER + assert p.conn[SERVER].our_role is SERVER + assert p.conn[SERVER].their_role is CLIENT + + data = p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Content-Length", "10")], + ), + ) + assert data == ( + b"GET / HTTP/1.1\r\n" b"host: example.com\r\n" b"content-length: 10\r\n\r\n" + ) + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + assert p.conn[CLIENT].our_state is SEND_BODY + assert p.conn[CLIENT].their_state is SEND_RESPONSE + assert p.conn[SERVER].our_state is SEND_RESPONSE + assert p.conn[SERVER].their_state is SEND_BODY + + assert p.conn[CLIENT].their_http_version is None + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) + assert data == b"HTTP/1.1 100 \r\n\r\n" + + data = p.send(SERVER, Response(status_code=200, headers=[("Content-Length", "11")])) + assert data == b"HTTP/1.1 200 \r\ncontent-length: 11\r\n\r\n" + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + assert p.conn[CLIENT].their_http_version == b"1.1" + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(CLIENT, Data(data=b"12345")) + assert data == b"12345" + data = p.send( + CLIENT, Data(data=b"67890"), expect=[Data(data=b"67890"), EndOfMessage()] + ) + assert data == b"67890" + data = p.send(CLIENT, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + + data = p.send(SERVER, Data(data=b"1234567890")) + assert data == b"1234567890" + data = p.send(SERVER, Data(data=b"1"), expect=[Data(data=b"1"), EndOfMessage()]) + assert data == b"1" + data = p.send(SERVER, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunked(): + p = ConnectionPair() + + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ), + ) + data = p.send(CLIENT, Data(data=b"1234567890", chunk_start=True, chunk_end=True)) + assert data == b"a\r\n1234567890\r\n" + data = p.send(CLIENT, Data(data=b"abcde", chunk_start=True, chunk_end=True)) + assert data == b"5\r\nabcde\r\n" + data = p.send(CLIENT, Data(data=b""), expect=[]) + assert data == b"" + data = p.send(CLIENT, EndOfMessage(headers=[("hello", "there")])) + assert data == b"0\r\nhello: there\r\n\r\n" + + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + p.send(SERVER, Data(data=b"54321", chunk_start=True, chunk_end=True)) + p.send(SERVER, Data(data=b"12345", chunk_start=True, chunk_end=True)) + p.send(SERVER, EndOfMessage()) + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunk_boundaries(): + conn = Connection(our_role=SERVER) + + request = ( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n" + b"\r\n" + ) + conn.receive_data(request) + assert conn.next_event() == Request( + method="POST", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ) + assert conn.next_event() is NEED_DATA + + conn.receive_data(b"5\r\nhello\r\n") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"5\r\nhel") + assert conn.next_event() 
== Data(data=b"hel", chunk_start=True, chunk_end=False) + + conn.receive_data(b"l") + assert conn.next_event() == Data(data=b"l", chunk_start=False, chunk_end=False) + + conn.receive_data(b"o\r\n") + assert conn.next_event() == Data(data=b"o", chunk_start=False, chunk_end=True) + + conn.receive_data(b"5\r\nhello") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"\r\n") + assert conn.next_event() == NEED_DATA + + conn.receive_data(b"0\r\n\r\n") + assert conn.next_event() == EndOfMessage() + + +def test_client_talking_to_http10_server(): + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "example.com")])) + c.send(EndOfMessage()) + assert c.our_state is DONE + # No content-length, so Http10 framing for body + assert receive_and_get(c, b"HTTP/1.0 200 OK\r\n\r\n") == [ + Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") + ] + assert c.our_state is MUST_CLOSE + assert receive_and_get(c, b"12345") == [Data(data=b"12345")] + assert receive_and_get(c, b"67890") == [Data(data=b"67890")] + assert receive_and_get(c, b"") == [EndOfMessage(), ConnectionClosed()] + assert c.their_state is CLOSED + + +def test_server_talking_to_http10_client(): + c = Connection(SERVER) + # No content-length, so no body + # NB: no host header + assert receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), + EndOfMessage(), + ] + assert c.their_state is MUST_CLOSE + + # We automatically Connection: close back at them + assert ( + c.send(Response(status_code=200, headers=[])) + == b"HTTP/1.1 200 \r\nconnection: close\r\n\r\n" + ) + + assert c.send(Data(data=b"12345")) == b"12345" + assert c.send(EndOfMessage()) == b"" + assert c.our_state is MUST_CLOSE + + # Check that it works if they do send Content-Length + c = Connection(SERVER) + # NB: no host header + assert receive_and_get(c, b"POST / HTTP/1.0\r\nContent-Length: 10\r\n\r\n1") == [ + Request( + method="POST", + target="/", + headers=[("Content-Length", "10")], + http_version="1.0", + ), + Data(data=b"1"), + ] + assert receive_and_get(c, b"234567890") == [Data(data=b"234567890"), EndOfMessage()] + assert c.their_state is MUST_CLOSE + assert receive_and_get(c, b"") == [ConnectionClosed()] + + +def test_automatic_transfer_encoding_in_response(): + # Check that in responses, the user can specify either Transfer-Encoding: + # chunked or no framing at all, and in both cases we automatically select + # the right option depending on whether the peer speaks HTTP/1.0 or + # HTTP/1.1 + for user_headers in [ + [("Transfer-Encoding", "chunked")], + [], + # In fact, this even works if Content-Length is set, + # because if both are set then Transfer-Encoding wins + [("Transfer-Encoding", "chunked"), ("Content-Length", "100")], + ]: + p = ConnectionPair() + p.send( + CLIENT, + [ + Request(method="GET", target="/", headers=[("Host", "example.com")]), + EndOfMessage(), + ], + ) + # When speaking to HTTP/1.1 client, all of the above cases get + # normalized to Transfer-Encoding: chunked + p.send( + SERVER, + Response(status_code=200, headers=user_headers), + expect=Response( + status_code=200, headers=[("Transfer-Encoding", "chunked")] + ), + ) + + # When speaking to HTTP/1.0 client, all of the above cases get + # normalized to no-framing-headers + c = Connection(SERVER) + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert ( + c.send(Response(status_code=200, headers=user_headers)) + == b"HTTP/1.1 200 
\r\nconnection: close\r\n\r\n" + ) + assert c.send(Data(data=b"12345")) == b"12345" + + +def test_automagic_connection_close_handling(): + p = ConnectionPair() + # If the user explicitly sets Connection: close, then we notice and + # respect it + p.send( + CLIENT, + [ + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Connection", "close")], + ), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states[CLIENT] is MUST_CLOSE + # And if the client sets it, the server automatically echoes it back + p.send( + SERVER, + # no header here... + [Response(status_code=204, headers=[]), EndOfMessage()], + # ...but oh look, it arrived anyway + expect=[ + Response(status_code=204, headers=[("connection", "close")]), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_100_continue(): + def setup(): + p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[ + ("Host", "example.com"), + ("Content-Length", "100"), + ("Expect", "100-continue"), + ], + ), + ) + for conn in p.conns: + assert conn.client_is_waiting_for_100_continue + assert not p.conn[CLIENT].they_are_waiting_for_100_continue + assert p.conn[SERVER].they_are_waiting_for_100_continue + return p + + # Disabled by 100 Continue + p = setup() + p.send(SERVER, InformationalResponse(status_code=100, headers=[])) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + # Disabled by a real response + p = setup() + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + # Disabled by the client going ahead and sending stuff anyway + p = setup() + p.send(CLIENT, Data(data=b"12345")) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + +def test_max_incomplete_event_size_countermeasure(): + # Infinitely long headers are definitely not okay + c = Connection(SERVER) + c.receive_data(b"GET / HTTP/1.0\r\nEndless: ") + assert c.next_event() is NEED_DATA + with pytest.raises(RemoteProtocolError): + while True: + c.receive_data(b"a" * 1024) + c.next_event() + + # Checking that the same header is accepted / rejected depending on the + # max_incomplete_event_size setting: + c = Connection(SERVER, max_incomplete_event_size=5000) + c.receive_data(b"GET / HTTP/1.0\r\nBig: ") + c.receive_data(b"a" * 4000) + c.receive_data(b"\r\n\r\n") + assert get_all_events(c) == [ + Request( + method="GET", target="/", http_version="1.0", headers=[("big", "a" * 4000)] + ), + EndOfMessage(), + ] + + c = Connection(SERVER, max_incomplete_event_size=4000) + c.receive_data(b"GET / HTTP/1.0\r\nBig: ") + c.receive_data(b"a" * 4000) + with pytest.raises(RemoteProtocolError): + c.next_event() + + # Temporarily exceeding the size limit is fine, as long as its done with + # complete events: + c = Connection(SERVER, max_incomplete_event_size=5000) + c.receive_data(b"GET / HTTP/1.0\r\nContent-Length: 10000") + c.receive_data(b"\r\n\r\n" + b"a" * 10000) + assert get_all_events(c) == [ + Request( + method="GET", + target="/", + http_version="1.0", + headers=[("Content-Length", "10000")], + ), + Data(data=b"a" * 10000), + EndOfMessage(), + ] + + c = Connection(SERVER, max_incomplete_event_size=100) + # Two pipelined requests to 
create a way-too-big receive buffer... but + # it's fine because we're not checking + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a\r\n\r\n" + b"GET /2 HTTP/1.1\r\nHost: b\r\n\r\n" + b"X" * 1000 + ) + assert get_all_events(c) == [ + Request(method="GET", target="/1", headers=[("host", "a")]), + EndOfMessage(), + ] + # Even more data comes in, still no problem + c.receive_data(b"X" * 1000) + # We can respond and reuse to get the second pipelined request + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + c.start_next_cycle() + assert get_all_events(c) == [ + Request(method="GET", target="/2", headers=[("host", "b")]), + EndOfMessage(), + ] + # But once we unpause and try to read the next message, and find that it's + # incomplete and the buffer is *still* way too large, then *that's* a + # problem: + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + c.start_next_cycle() + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_reuse_simple(): + p = ConnectionPair() + p.send( + CLIENT, + [Request(method="GET", target="/", headers=[("Host", "a")]), EndOfMessage()], + ) + p.send(SERVER, [Response(status_code=200, headers=[]), EndOfMessage()]) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + conn.start_next_cycle() + + p.send( + CLIENT, + [ + Request(method="DELETE", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ], + ) + p.send(SERVER, [Response(status_code=404, headers=[]), EndOfMessage()]) + + +def test_pipelining(): + # Client doesn't support pipelining, so we have to do this by hand + c = Connection(SERVER) + assert c.next_event() is NEED_DATA + # 3 requests all bunched up + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + b"GET /3 HTTP/1.1\r\nHost: a.com\r\n\r\n" + ) + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.their_state is DONE + assert c.our_state is SEND_RESPONSE + + assert c.next_event() is PAUSED + + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + assert c.their_state is DONE + assert c.our_state is DONE + + c.start_next_cycle() + + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ] + assert c.next_event() is PAUSED + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + c.start_next_cycle() + + assert get_all_events(c) == [ + Request(method="GET", target="/3", headers=[("Host", "a.com")]), + EndOfMessage(), + ] + # Doesn't pause this time, no trailing data + assert c.next_event() is NEED_DATA + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + + # Arrival of more data triggers pause + assert c.next_event() is NEED_DATA + c.receive_data(b"SADF") + assert c.next_event() is PAUSED + assert c.trailing_data == (b"SADF", False) + # If EOF arrives while paused, we don't see that either: + c.receive_data(b"") + assert c.trailing_data == (b"SADF", True) + assert c.next_event() is PAUSED + c.receive_data(b"") + assert c.next_event() is PAUSED + # Can't call receive_data with non-empty buf after closing it + with pytest.raises(RuntimeError): + c.receive_data(b"FDSA") + + +def test_protocol_switch(): + for (req, deny, accept) in [ + ( + 
Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1")], + ), + Response(status_code=404, headers=[]), + Response(status_code=200, headers=[]), + ), + ( + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=200, headers=[]), + InformationalResponse(status_code=101, headers=[("Upgrade", "a")]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[]), + # Accept CONNECT, not upgrade + Response(status_code=200, headers=[]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[]), + # Accept Upgrade, not CONNECT + InformationalResponse(status_code=101, headers=[("Upgrade", "b")]), + ), + ]: + + def setup(): + p = ConnectionPair() + p.send(CLIENT, req) + # No switch-related state change stuff yet; the client has to + # finish the request before that kicks in + for conn in p.conns: + assert conn.states[CLIENT] is SEND_BODY + p.send(CLIENT, [Data(data=b"1"), EndOfMessage()]) + for conn in p.conns: + assert conn.states[CLIENT] is MIGHT_SWITCH_PROTOCOL + assert p.conn[SERVER].next_event() is PAUSED + return p + + # Test deny case + p = setup() + p.send(SERVER, deny) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + p.send(SERVER, EndOfMessage()) + # Check that re-use is still allowed after a denial + for conn in p.conns: + conn.start_next_cycle() + + # Test accept case + p = setup() + p.send(SERVER, accept) + for conn in p.conns: + assert conn.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + conn.receive_data(b"123") + assert conn.next_event() is PAUSED + conn.receive_data(b"456") + assert conn.next_event() is PAUSED + assert conn.trailing_data == (b"123456", False) + + # Pausing in might-switch, then recovery + # (weird artificial case where the trailing data actually is valid + # HTTP for some reason, because this makes it easier to test the state + # logic) + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"GET / HTTP/1.0\r\n\r\n") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"GET / HTTP/1.0\r\n\r\n", False) + sc.send(deny) + assert sc.next_event() is PAUSED + sc.send(EndOfMessage()) + sc.start_next_cycle() + assert get_all_events(sc) == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), + EndOfMessage(), + ] + + # When we're DONE, have no trailing data, and the connection gets + # closed, we report ConnectionClosed(). When we're in might-switch or + # switched, we don't. 
+ p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"", True) + p.send(SERVER, accept) + assert sc.next_event() is PAUSED + + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") == [] + assert sc.next_event() is PAUSED + sc.send(deny) + assert sc.next_event() == ConnectionClosed() + + # You can't send after switching protocols, or while waiting for a + # protocol switch + p = setup() + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send( + Request(method="GET", target="/", headers=[("Host", "a")]) + ) + p = setup() + p.send(SERVER, accept) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(Data(data=b"123")) + + +def test_close_simple(): + # Just immediately closing a new connection without anything having + # happened yet. + for (who_shot_first, who_shot_second) in [(CLIENT, SERVER), (SERVER, CLIENT)]: + + def setup(): + p = ConnectionPair() + p.send(who_shot_first, ConnectionClosed()) + for conn in p.conns: + assert conn.states == { + who_shot_first: CLOSED, + who_shot_second: MUST_CLOSE, + } + return p + + # You can keep putting b"" into a closed connection, and you keep + # getting ConnectionClosed() out: + p = setup() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + p.conn[who_shot_second].receive_data(b"") + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + # Second party can close... + p = setup() + p.send(who_shot_second, ConnectionClosed()) + for conn in p.conns: + assert conn.our_state is CLOSED + assert conn.their_state is CLOSED + # But trying to receive new data on a closed connection is a + # RuntimeError (not ProtocolError, because the problem here isn't + # violation of HTTP, it's violation of physics) + p = setup() + with pytest.raises(RuntimeError): + p.conn[who_shot_second].receive_data(b"123") + # And receiving new data on a MUST_CLOSE connection is a ProtocolError + p = setup() + p.conn[who_shot_first].receive_data(b"GET") + with pytest.raises(RemoteProtocolError): + p.conn[who_shot_first].next_event() + + +def test_close_different_states(): + req = [ + Request(method="GET", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ] + resp = [Response(status_code=200, headers=[]), EndOfMessage()] + + # Client before request + p = ConnectionPair() + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + + # Client after request + p = ConnectionPair() + p.send(CLIENT, req) + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + + # Server after request -> not allowed + p = ConnectionPair() + p.send(CLIENT, req) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(ConnectionClosed()) + p.conn[CLIENT].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[CLIENT].next_event() + + # Server after response + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(SERVER, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + # Both after closing (ConnectionClosed() is idempotent) + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + + # In the middle of sending -> not allowed + 
p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", target="/", headers=[("Host", "a"), ("Content-Length", "10")] + ), + ) + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send(ConnectionClosed()) + p.conn[SERVER].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[SERVER].next_event() + + +# Receive several requests and then client shuts down their side of the +# connection; we can respond to each +def test_pipelined_close(): + c = Connection(SERVER) + # 2 requests then a close + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + ) + c.receive_data(b"") + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.states[CLIENT] is DONE + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + assert c.states[SERVER] is DONE + c.start_next_cycle() + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ConnectionClosed(), + ] + assert c.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + assert c.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + c.send(ConnectionClosed()) + assert c.states == {CLIENT: CLOSED, SERVER: CLOSED} + + +def test_sendfile(): + class SendfilePlaceholder: + def __len__(self): + return 10 + + placeholder = SendfilePlaceholder() + + def setup(header, http_version): + c = Connection(SERVER) + receive_and_get( + c, "GET / HTTP/{}\r\nHost: a\r\n\r\n".format(http_version).encode("ascii") + ) + headers = [] + if header: + headers.append(header) + c.send(Response(status_code=200, headers=headers)) + return c, c.send_with_data_passthrough(Data(data=placeholder)) + + c, data = setup(("Content-Length", "10"), "1.1") + assert data == [placeholder] + # Raises an error if the connection object doesn't think we've sent + # exactly 10 bytes + c.send(EndOfMessage()) + + _, data = setup(("Transfer-Encoding", "chunked"), "1.1") + assert placeholder in data + data[data.index(placeholder)] = b"x" * 10 + assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" + + c, data = setup(None, "1.0") + assert data == [placeholder] + assert c.our_state is SEND_BODY + + +def test_errors(): + # After a receive error, you can't receive + for role in [CLIENT, SERVER]: + c = Connection(our_role=role) + c.receive_data(b"gibberish\r\n\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + # Now any attempt to receive continues to raise + assert c.their_state is ERROR + assert c.our_state is not ERROR + print(c._cstate.states) + with pytest.raises(RemoteProtocolError): + c.next_event() + # But we can still yell at the client for sending us gibberish + if role is SERVER: + assert ( + c.send(Response(status_code=400, headers=[])) + == b"HTTP/1.1 400 \r\nconnection: close\r\n\r\n" + ) + + # After an error sending, you can no longer send + # (This is especially important for things like content-length errors, + # where there's complex internal state being modified) + def conn(role): + c = Connection(our_role=role) + if role is SERVER: + # Put it into the state where it *could* send a response... 
+ receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert c.our_state is SEND_RESPONSE + return c + + for role in [CLIENT, SERVER]: + if role is CLIENT: + # This HTTP/1.0 request won't be detected as bad until after we go + # through the state machine and hit the writing code + good = Request(method="GET", target="/", headers=[("Host", "example.com")]) + bad = Request( + method="GET", + target="/", + headers=[("Host", "example.com")], + http_version="1.0", + ) + elif role is SERVER: + good = Response(status_code=200, headers=[]) + bad = Response(status_code=200, headers=[], http_version="1.0") + # Make sure 'good' actually is good + c = conn(role) + c.send(good) + assert c.our_state is not ERROR + # Do that again, but this time sending 'bad' first + c = conn(role) + with pytest.raises(LocalProtocolError): + c.send(bad) + assert c.our_state is ERROR + assert c.their_state is not ERROR + # Now 'good' is not so good + with pytest.raises(LocalProtocolError): + c.send(good) + + # And check send_failed() too + c = conn(role) + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + # This is idempotent + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + + +def test_idle_receive_nothing(): + # At one point this incorrectly raised an error + for role in [CLIENT, SERVER]: + c = Connection(role) + assert c.next_event() is NEED_DATA + + +def test_connection_drop(): + c = Connection(SERVER) + c.receive_data(b"GET /") + assert c.next_event() is NEED_DATA + c.receive_data(b"") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_408_request_timeout(): + # Should be able to send this spontaneously as a server without seeing + # anything from client + p = ConnectionPair() + p.send(SERVER, Response(status_code=408, headers=[])) + + +# This used to raise IndexError +def test_empty_request(): + c = Connection(SERVER) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to raise IndexError +def test_empty_response(): + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "a")])) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to give different headers for HEAD and GET. +# The correct way to handle HEAD is to put whatever headers we *would* have +# put if it were a GET -- even though we know that for HEAD, those headers +# will be ignored. 
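(Aside, not part of the committed file: the same principle sketched against the public h11.Connection API rather than the low-level setup the test below uses; a HEAD request gets byte-for-byte the same framing headers as a GET would.)

    import h11

    def framing_for(method):
        c = h11.Connection(h11.SERVER)
        c.receive_data(method + b" / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        # Drain the Request and EndOfMessage events for the incoming request.
        while type(c.next_event()) is not h11.EndOfMessage:
            pass
        return c.send(h11.Response(status_code=200, headers=[]))

    # Identical framing (Transfer-Encoding: chunked here) for GET and HEAD.
    assert framing_for(b"GET") == framing_for(b"HEAD")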
+def test_HEAD_framing_headers(): + def setup(method, http_version): + c = Connection(SERVER) + c.receive_data( + method + b" / HTTP/" + http_version + b"\r\n" + b"Host: example.com\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert type(c.next_event()) is EndOfMessage + return c + + for method in [b"GET", b"HEAD"]: + # No Content-Length, HTTP/1.1 peer, should use chunked + c = setup(method, b"1.1") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" + b"transfer-encoding: chunked\r\n\r\n" + ) + + # No Content-Length, HTTP/1.0 peer, frame with connection: close + c = setup(method, b"1.0") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" + b"connection: close\r\n\r\n" + ) + + # Content-Length + Transfer-Encoding, TE wins + c = setup(method, b"1.1") + assert ( + c.send( + Response( + status_code=200, + headers=[ + ("Content-Length", "100"), + ("Transfer-Encoding", "chunked"), + ], + ) + ) + == b"HTTP/1.1 200 \r\n" + b"transfer-encoding: chunked\r\n\r\n" + ) + + +def test_special_exceptions_for_lost_connection_in_message_body(): + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 100\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"12345") + assert c.next_event() == Data(data=b"12345") + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "received 5 bytes" in str(excinfo.value) + assert "expected 100" in str(excinfo.value) + + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"8\r\n012345") + assert c.next_event().data == b"012345" + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "incomplete chunked read" in str(excinfo.value) diff --git a/py3/lib/python3.6/site-packages/h11/tests/test_events.py b/py3/lib/python3.6/site-packages/h11/tests/test_events.py new file mode 100644 index 0000000..4c2a912 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/tests/test_events.py @@ -0,0 +1,165 @@ +import pytest + +from .. 
import _events +from .._events import * +from .._util import LocalProtocolError + + +def test_event_bundle(): + class T(_events._EventBundle): + _fields = ["a", "b"] + _defaults = {"b": 1} + + def _validate(self): + if self.a == 0: + raise ValueError + + # basic construction and methods + t = T(a=1, b=0) + assert repr(t) == "T(a=1, b=0)" + assert t == T(a=1, b=0) + assert not (t == T(a=2, b=0)) + assert not (t != T(a=1, b=0)) + assert t != T(a=2, b=0) + with pytest.raises(TypeError): + hash(t) + + # check defaults + t = T(a=10) + assert t.a == 10 + assert t.b == 1 + + # no positional args + with pytest.raises(TypeError): + T(1) + + with pytest.raises(TypeError): + T(1, a=1, b=0) + + # unknown field + with pytest.raises(TypeError): + T(a=1, b=0, c=10) + + # missing required field + with pytest.raises(TypeError) as exc: + T(b=0) + # make sure we error on the right missing kwarg + assert "kwarg a" in str(exc) + + # _validate is called + with pytest.raises(ValueError): + T(a=0, b=0) + + +def test_events(): + with pytest.raises(LocalProtocolError): + # Missing Host: + req = Request( + method="GET", target="/", headers=[("a", "b")], http_version="1.1" + ) + # But this is okay (HTTP/1.0) + req = Request(method="GET", target="/", headers=[("a", "b")], http_version="1.0") + # fields are normalized + assert req.method == b"GET" + assert req.target == b"/" + assert req.headers == [(b"a", b"b")] + assert req.http_version == b"1.0" + + # This is also okay -- has a Host (with weird capitalization, which is ok) + req = Request( + method="GET", + target="/", + headers=[("a", "b"), ("hOSt", "example.com")], + http_version="1.1", + ) + # we normalize header capitalization + assert req.headers == [(b"a", b"b"), (b"host", b"example.com")] + + # Multiple host is bad too + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.1", + ) + # Even for HTTP/1.0 + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.0", + ) + + # Header values are validated + for bad_char in "\x00\r\n\f\v": + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd" + bad_char)], + http_version="1.0", + ) + + # But for compatibility we allow non-whitespace control characters, even + # though they're forbidden by the spec. 
+ Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd\x01\x02\x7f")], + http_version="1.0", + ) + + # Request target is validated + for bad_char in b"\x00\x20\x7f\xee": + target = bytearray(b"/") + target.append(bad_char) + with pytest.raises(LocalProtocolError): + Request( + method="GET", target=target, headers=[("Host", "a")], http_version="1.1" + ) + + ir = InformationalResponse(status_code=100, headers=[("Host", "a")]) + assert ir.status_code == 100 + assert ir.headers == [(b"host", b"a")] + assert ir.http_version == b"1.1" + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=200, headers=[("Host", "a")]) + + resp = Response(status_code=204, headers=[], http_version="1.0") + assert resp.status_code == 204 + assert resp.headers == [] + assert resp.http_version == b"1.0" + + with pytest.raises(LocalProtocolError): + resp = Response(status_code=100, headers=[], http_version="1.0") + + with pytest.raises(LocalProtocolError): + Response(status_code="100", headers=[], http_version="1.0") + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=b"100", headers=[], http_version="1.0") + + d = Data(data=b"asdf") + assert d.data == b"asdf" + + eom = EndOfMessage() + assert eom.headers == [] + + cc = ConnectionClosed() + assert repr(cc) == "ConnectionClosed()" + + +def test_intenum_status_code(): + # https://github.com/python-hyper/h11/issues/72 + try: + from http import HTTPStatus + except ImportError: + pytest.skip("Only affects Python 3") + + r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") + assert r.status_code == HTTPStatus.OK + assert type(r.status_code) is not type(HTTPStatus.OK) + assert type(r.status_code) is int diff --git a/py3/lib/python3.6/site-packages/h11/tests/test_headers.py b/py3/lib/python3.6/site-packages/h11/tests/test_headers.py new file mode 100644 index 0000000..67bcd7b --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/tests/test_headers.py @@ -0,0 +1,139 @@ +import pytest + +from .._headers import * + + +def test_normalize_and_validate(): + assert normalize_and_validate([("foo", "bar")]) == [(b"foo", b"bar")] + assert normalize_and_validate([(b"foo", b"bar")]) == [(b"foo", b"bar")] + + # no leading/trailing whitespace in names + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo ", "bar")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b" foo", "bar")]) + + # no weird characters in names + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([(b"foo bar", b"baz")]) + assert "foo bar" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x00bar", b"baz")]) + # Not even 8-bit characters: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\xffbar", b"baz")]) + # And not even the control characters we allow in values: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x01bar", b"baz")]) + + # no return or NUL characters in values + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("foo", "bar\rbaz")]) + assert "bar\\rbaz" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\nbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\x00baz")]) + # no leading/trailing whitespace + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz ")]) + with pytest.raises(LocalProtocolError): + 
normalize_and_validate([("foo", " barbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz\t")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "\tbarbaz")]) + + # content-length + assert normalize_and_validate([("Content-Length", "1")]) == [ + (b"content-length", b"1") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "asdf")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1x")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1"), ("Content-Length", "2")]) + + # transfer-encoding + assert normalize_and_validate([("Transfer-Encoding", "chunked")]) == [ + (b"transfer-encoding", b"chunked") + ] + assert normalize_and_validate([("Transfer-Encoding", "cHuNkEd")]) == [ + (b"transfer-encoding", b"chunked") + ] + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("Transfer-Encoding", "gzip")]) + assert excinfo.value.error_status_hint == 501 # Not Implemented + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate( + [("Transfer-Encoding", "chunked"), ("Transfer-Encoding", "gzip")] + ) + assert excinfo.value.error_status_hint == 501 # Not Implemented + + +def test_get_set_comma_header(): + headers = normalize_and_validate( + [ + ("Connection", "close"), + ("whatever", "something"), + ("connectiON", "fOo,, , BAR"), + ] + ) + + assert get_comma_header(headers, b"connection") == [b"close", b"foo", b"bar"] + + set_comma_header(headers, b"newthing", ["a", "b"]) + + with pytest.raises(LocalProtocolError): + set_comma_header(headers, b"newthing", [" a", "b"]) + + assert headers == [ + (b"connection", b"close"), + (b"whatever", b"something"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + ] + + set_comma_header(headers, b"whatever", ["different thing"]) + + assert headers == [ + (b"connection", b"close"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + (b"whatever", b"different thing"), + ] + + +def test_has_100_continue(): + from .._events import Request + + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + ) + ) + assert not has_expect_100_continue( + Request(method="GET", target="/", headers=[("Host", "example.com")]) + ) + # Case insensitive + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-Continue")], + ) + ) + # Doesn't work in HTTP/1.0 + assert not has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + http_version="1.0", + ) + ) diff --git a/py3/lib/python3.6/site-packages/h11/tests/test_helpers.py b/py3/lib/python3.6/site-packages/h11/tests/test_helpers.py new file mode 100644 index 0000000..1477947 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/tests/test_helpers.py @@ -0,0 +1,23 @@ +from .helpers import * + + +def test_normalize_data_events(): + assert normalize_data_events( + [ + Data(data=bytearray(b"1")), + Data(data=b"2"), + Response(status_code=200, headers=[]), + Data(data=b"3"), + Data(data=b"4"), + EndOfMessage(), + Data(data=b"5"), + Data(data=b"6"), + Data(data=b"7"), + ] + ) == [ + Data(data=b"12"), + Response(status_code=200, headers=[]), + Data(data=b"34"), + EndOfMessage(), + Data(data=b"567"), + ] diff --git 
a/py3/lib/python3.6/site-packages/h11/tests/test_io.py b/py3/lib/python3.6/site-packages/h11/tests/test_io.py new file mode 100644 index 0000000..ef5e31b --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/tests/test_io.py @@ -0,0 +1,507 @@ +import pytest + +from .._events import * +from .._headers import normalize_and_validate +from .._readers import ( + _obsolete_line_fold, + ChunkedReader, + ContentLengthReader, + Http10Reader, + READERS, +) +from .._receivebuffer import ReceiveBuffer +from .._state import * +from .._util import LocalProtocolError +from .._writers import ( + ChunkedWriter, + ContentLengthWriter, + Http10Writer, + write_any_response, + write_headers, + write_request, + WRITERS, +) +from .helpers import normalize_data_events + +SIMPLE_CASES = [ + ( + (CLIENT, IDLE), + Request( + method="GET", + target="/a", + headers=[("Host", "foo"), ("Connection", "close")], + ), + b"GET /a HTTP/1.1\r\nhost: foo\r\nconnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[("Connection", "close")], reason=b"OK"), + b"HTTP/1.1 200 OK\r\nconnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[], reason=b"OK"), + b"HTTP/1.1 200 OK\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse( + status_code=101, headers=[("Upgrade", "websocket")], reason=b"Upgrade" + ), + b"HTTP/1.1 101 Upgrade\r\nupgrade: websocket\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), + b"HTTP/1.1 101 Upgrade\r\n\r\n", + ), +] + + +def dowrite(writer, obj): + got_list = [] + writer(obj, got_list.append) + return b"".join(got_list) + + +def tw(writer, obj, expected): + got = dowrite(writer, obj) + assert got == expected + + +def makebuf(data): + buf = ReceiveBuffer() + buf += data + return buf + + +def tr(reader, data, expected): + def check(got): + assert got == expected + # Headers should always be returned as bytes, not e.g. 
bytearray + # https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478 + for name, value in getattr(got, "headers", []): + print(name, value) + assert type(name) is bytes + assert type(value) is bytes + + # Simple: consume whole thing + buf = makebuf(data) + check(reader(buf)) + assert not buf + + # Incrementally growing buffer + buf = ReceiveBuffer() + for i in range(len(data)): + assert reader(buf) is None + buf += data[i : i + 1] + check(reader(buf)) + + # Trailing data + buf = makebuf(data) + buf += b"trailing" + check(reader(buf)) + assert bytes(buf) == b"trailing" + + +def test_writers_simple(): + for ((role, state), event, binary) in SIMPLE_CASES: + tw(WRITERS[role, state], event, binary) + + +def test_readers_simple(): + for ((role, state), event, binary) in SIMPLE_CASES: + tr(READERS[role, state], binary, event) + + +def test_writers_unusual(): + # Simple test of the write_headers utility routine + tw( + write_headers, + normalize_and_validate([("foo", "bar"), ("baz", "quux")]), + b"foo: bar\r\nbaz: quux\r\n\r\n", + ) + tw(write_headers, [], b"\r\n") + + # We understand HTTP/1.0, but we don't speak it + with pytest.raises(LocalProtocolError): + tw( + write_request, + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Connection", "close")], + http_version="1.0", + ), + None, + ) + with pytest.raises(LocalProtocolError): + tw( + write_any_response, + Response( + status_code=200, headers=[("Connection", "close")], http_version="1.0" + ), + None, + ) + + +def test_readers_unusual(): + # Reading HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\nSome: header\r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[("Some", "header")], + http_version="1.0", + ), + ) + + # check no-headers, since it's only legal with HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\n\r\n", + Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\nSome: header\r\n\r\n", + Response( + status_code=200, + headers=[("Some", "header")], + http_version="1.0", + reason=b"OK", + ), + ) + + # single-character header values (actually disallowed by the ABNF in RFC + # 7230 -- this is a bug in the standard that we originally copied...) 
+ tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo: a a a a a \r\n\r\n", + Response( + status_code=200, + headers=[("Foo", "a a a a a")], + http_version="1.0", + reason=b"OK", + ), + ) + + # Empty headers -- also legal + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo:\r\n\r\n", + Response( + status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" + ), + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo: \t \t \r\n\r\n", + Response( + status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" + ), + ) + + # Tolerate broken servers that leave off the response code + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200\r\n" b"Foo: bar\r\n\r\n", + Response( + status_code=200, headers=[("Foo", "bar")], http_version="1.0", reason=b"" + ), + ) + + # obsolete line folding + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Some: multi-line\r\n" + b" header\r\n" + b"\tnonsense\r\n" + b" \t \t\tI guess\r\n" + b"Connection: close\r\n" + b"More-nonsense: in the\r\n" + b" last header \r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[ + ("Host", "example.com"), + ("Some", "multi-line header nonsense I guess"), + ("Connection", "close"), + ("More-nonsense", "in the last header"), + ], + ), + ) + + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b" folded: line\r\n\r\n", + None, + ) + + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo : line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None) + + +def test__obsolete_line_fold_bytes(): + # _obsolete_line_fold has a defensive cast to bytearray, which is + # necessary to protect against O(n^2) behavior in case anyone ever passes + # in regular bytestrings... but right now we never pass in regular + # bytestrings. so this test just exists to get some coverage on that + # defensive cast. 
+ assert list(_obsolete_line_fold([b"aaa", b"bbb", b" ccc", b"ddd"])) == [ + b"aaa", + bytearray(b"bbb ccc"), + b"ddd", + ] + + +def _run_reader_iter(reader, buf, do_eof): + while True: + event = reader(buf) + if event is None: + break + yield event + # body readers have undefined behavior after returning EndOfMessage, + # because this changes the state so they don't get called again + if type(event) is EndOfMessage: + break + if do_eof: + assert not buf + yield reader.read_eof() + + +def _run_reader(*args): + events = list(_run_reader_iter(*args)) + return normalize_data_events(events) + + +def t_body_reader(thunk, data, expected, do_eof=False): + # Simple: consume whole thing + print("Test 1") + buf = makebuf(data) + assert _run_reader(thunk(), buf, do_eof) == expected + + # Incrementally growing buffer + print("Test 2") + reader = thunk() + buf = ReceiveBuffer() + events = [] + for i in range(len(data)): + events += _run_reader(reader, buf, False) + buf += data[i : i + 1] + events += _run_reader(reader, buf, do_eof) + assert normalize_data_events(events) == expected + + is_complete = any(type(event) is EndOfMessage for event in expected) + if is_complete and not do_eof: + buf = makebuf(data + b"trailing") + assert _run_reader(thunk(), buf, False) == expected + + +def test_ContentLengthReader(): + t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()]) + + t_body_reader( + lambda: ContentLengthReader(10), + b"0123456789", + [Data(data=b"0123456789"), EndOfMessage()], + ) + + +def test_Http10Reader(): + t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True) + t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False) + t_body_reader( + Http10Reader, b"asdf", [Data(data=b"asdf"), EndOfMessage()], do_eof=True + ) + + +def test_ChunkedReader(): + t_body_reader(ChunkedReader, b"0\r\n\r\n", [EndOfMessage()]) + + t_body_reader( + ChunkedReader, + b"0\r\nSome: header\r\n\r\n", + [EndOfMessage(headers=[("Some", "header")])], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + + b"10\r\n0123456789abcdef\r\n" + + b"0\r\n" + + b"Some: header\r\n\r\n", + [ + Data(data=b"012340123456789abcdef"), + EndOfMessage(headers=[("Some", "header")]), + ], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + b"10\r\n0123456789abcdef\r\n" + b"0\r\n\r\n", + [Data(data=b"012340123456789abcdef"), EndOfMessage()], + ) + + # handles upper and lowercase hex + t_body_reader( + ChunkedReader, + b"aA\r\n" + b"x" * 0xAA + b"\r\n" + b"0\r\n\r\n", + [Data(data=b"x" * 0xAA), EndOfMessage()], + ) + + # refuses arbitrarily long chunk integers + with pytest.raises(LocalProtocolError): + # Technically this is legal HTTP/1.1, but we refuse to process chunk + # sizes that don't fit into 20 characters of hex + t_body_reader(ChunkedReader, b"9" * 100 + b"\r\nxxx", [Data(data=b"xxx")]) + + # refuses garbage in the chunk count + with pytest.raises(LocalProtocolError): + t_body_reader(ChunkedReader, b"10\x00\r\nxxx", None) + + # handles (and discards) "chunk extensions" omg wtf + t_body_reader( + ChunkedReader, + b"5; hello=there\r\n" + + b"xxxxx" + + b"\r\n" + + b'0; random="junk"; some=more; canbe=lonnnnngg\r\n\r\n', + [Data(data=b"xxxxx"), EndOfMessage()], + ) + + +def test_ContentLengthWriter(): + w = ContentLengthWriter(5) + assert dowrite(w, Data(data=b"123")) == b"123" + assert dowrite(w, Data(data=b"45")) == b"45" + assert dowrite(w, EndOfMessage()) == b"" + + w = ContentLengthWriter(5) + with pytest.raises(LocalProtocolError): + dowrite(w, Data(data=b"123456")) + + w = 
ContentLengthWriter(5)
+    dowrite(w, Data(data=b"123"))
+    with pytest.raises(LocalProtocolError):
+        dowrite(w, Data(data=b"456"))
+
+    w = ContentLengthWriter(5)
+    dowrite(w, Data(data=b"123"))
+    with pytest.raises(LocalProtocolError):
+        dowrite(w, EndOfMessage())
+
+    w = ContentLengthWriter(5)
+    assert dowrite(w, Data(data=b"123")) == b"123"
+    assert dowrite(w, Data(data=b"45")) == b"45"
+    with pytest.raises(LocalProtocolError):
+        dowrite(w, EndOfMessage(headers=[("Etag", "asdf")]))
+
+
+def test_ChunkedWriter():
+    w = ChunkedWriter()
+    assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n"
+    assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n"
+
+    assert dowrite(w, Data(data=b"")) == b""
+
+    assert dowrite(w, EndOfMessage()) == b"0\r\n\r\n"
+
+    assert (
+        dowrite(w, EndOfMessage(headers=[("Etag", "asdf"), ("a", "b")]))
+        == b"0\r\netag: asdf\r\na: b\r\n\r\n"
+    )
+
+
+def test_Http10Writer():
+    w = Http10Writer()
+    assert dowrite(w, Data(data=b"1234")) == b"1234"
+    assert dowrite(w, EndOfMessage()) == b""
+
+    with pytest.raises(LocalProtocolError):
+        dowrite(w, EndOfMessage(headers=[("Etag", "asdf")]))
+
+
+def test_reject_garbage_after_request_line():
+    with pytest.raises(LocalProtocolError):
+        tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None)
+
+
+def test_reject_garbage_after_response_line():
+    with pytest.raises(LocalProtocolError):
+        tr(
+            READERS[CLIENT, IDLE],
+            b"HEAD /foo HTTP/1.1 xxxxxx\r\n" b"Host: a\r\n\r\n",
+            None,
+        )
+
+
+def test_reject_garbage_in_header_line():
+    with pytest.raises(LocalProtocolError):
+        tr(
+            READERS[CLIENT, IDLE],
+            b"HEAD /foo HTTP/1.1\r\n" b"Host: foo\x00bar\r\n\r\n",
+            None,
+        )
+
+
+def test_reject_non_vchar_in_path():
+    for bad_char in b"\x00\x20\x7f\xee":
+        message = bytearray(b"HEAD /")
+        message.append(bad_char)
+        message.extend(b" HTTP/1.1\r\nHost: foobar\r\n\r\n")
+        with pytest.raises(LocalProtocolError):
+            tr(READERS[CLIENT, IDLE], message, None)
+
+
+# https://github.com/python-hyper/h11/issues/57
+def test_allow_some_garbage_in_cookies():
+    tr(
+        READERS[CLIENT, IDLE],
+        b"HEAD /foo HTTP/1.1\r\n"
+        b"Host: foo\r\n"
+        b"Set-Cookie: ___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900\r\n"
+        b"\r\n",
+        Request(
+            method="HEAD",
+            target="/foo",
+            headers=[
+                ("Host", "foo"),
+                ("Set-Cookie", "___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900"),
+            ],
+        ),
+    )
+
+
+def test_host_comes_first():
+    tw(
+        write_headers,
+        normalize_and_validate([("foo", "bar"), ("Host", "example.com")]),
+        b"host: example.com\r\nfoo: bar\r\n\r\n",
+    )
diff --git a/py3/lib/python3.6/site-packages/h11/tests/test_receivebuffer.py b/py3/lib/python3.6/site-packages/h11/tests/test_receivebuffer.py
new file mode 100644
index 0000000..2be220b
--- /dev/null
+++ b/py3/lib/python3.6/site-packages/h11/tests/test_receivebuffer.py
@@ -0,0 +1,78 @@
+from .._receivebuffer import ReceiveBuffer
+
+
+def test_receivebuffer():
+    b = ReceiveBuffer()
+    assert not b
+    assert len(b) == 0
+    assert bytes(b) == b""
+
+    b += b"123"
+    assert b
+    assert len(b) == 3
+    assert bytes(b) == b"123"
+
+    b.compress()
+    assert bytes(b) == b"123"
+
+    assert b.maybe_extract_at_most(2) == b"12"
+    assert b
+    assert len(b) == 1
+    assert bytes(b) == b"3"
+
+    b.compress()
+    assert bytes(b) == b"3"
+
+    assert b.maybe_extract_at_most(10) == b"3"
+    assert bytes(b) == b""
+
+    assert b.maybe_extract_at_most(10) is None
+    assert not b
+
+    ################################################################
+    # maybe_extract_until_next
+    
################################################################ + + b += b"12345a6789aa" + + assert b.maybe_extract_until_next(b"a") == b"12345a" + assert bytes(b) == b"6789aa" + + assert b.maybe_extract_until_next(b"aaa") is None + assert bytes(b) == b"6789aa" + + b += b"a12" + assert b.maybe_extract_until_next(b"aaa") == b"6789aaa" + assert bytes(b) == b"12" + + # check repeated searches for the same needle, triggering the + # pickup-where-we-left-off logic + b += b"345" + assert b.maybe_extract_until_next(b"aaa") is None + + b += b"6789aaa123" + assert b.maybe_extract_until_next(b"aaa") == b"123456789aaa" + assert bytes(b) == b"123" + + ################################################################ + # maybe_extract_lines + ################################################################ + + b += b"\r\na: b\r\nfoo:bar\r\n\r\ntrailing" + lines = b.maybe_extract_lines() + assert lines == [b"123", b"a: b", b"foo:bar"] + assert bytes(b) == b"trailing" + + assert b.maybe_extract_lines() is None + + b += b"\r\n\r" + assert b.maybe_extract_lines() is None + + assert b.maybe_extract_at_most(100) == b"trailing\r\n\r" + assert not b + + # Empty body case (as happens at the end of chunked encoding if there are + # no trailing headers, e.g.) + b += b"\r\ntrailing" + assert b.maybe_extract_lines() == [] + assert bytes(b) == b"trailing" diff --git a/py3/lib/python3.6/site-packages/h11/tests/test_state.py b/py3/lib/python3.6/site-packages/h11/tests/test_state.py new file mode 100644 index 0000000..efe83f0 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/tests/test_state.py @@ -0,0 +1,250 @@ +import pytest + +from .._events import * +from .._state import * +from .._state import _SWITCH_CONNECT, _SWITCH_UPGRADE, ConnectionState +from .._util import LocalProtocolError + + +def test_ConnectionState(): + cs = ConnectionState() + + # Basic event-triggered transitions + + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + cs.process_event(CLIENT, Request) + # The SERVER-Request special case: + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # Illegal transitions raise an error and nothing happens + with pytest.raises(LocalProtocolError): + cs.process_event(CLIENT, Request) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: DONE, SERVER: DONE} + + # State-triggered transition + + cs.process_event(SERVER, ConnectionClosed) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + +def test_ConnectionState_keep_alive(): + # keep_alive = False + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_ConnectionState_keep_alive_in_DONE(): + # Check that if keep_alive is disabled when the CLIENT is already in DONE, + # then this is sufficient to immediately trigger the DONE -> MUST_CLOSE + # transition + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states[CLIENT] is DONE + 
cs.process_keep_alive_disabled() + assert cs.states[CLIENT] is MUST_CLOSE + + +def test_ConnectionState_switch_denied(): + for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE): + for deny_early in (True, False): + cs = ConnectionState() + cs.process_client_switch_proposal(switch_type) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + assert switch_type in cs.pending_switch_proposals + + if deny_early: + # before client reaches DONE + cs.process_event(SERVER, Response) + assert not cs.pending_switch_proposals + + cs.process_event(CLIENT, EndOfMessage) + + if deny_early: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + assert not cs.pending_switch_proposals + + +_response_type_for_switch = { + _SWITCH_UPGRADE: InformationalResponse, + _SWITCH_CONNECT: Response, + None: Response, +} + + +def test_ConnectionState_protocol_switch_accepted(): + for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(switch_event) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, _response_type_for_switch[switch_event], switch_event) + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_double_protocol_switch(): + # CONNECT + Upgrade is legal! Very silly, but legal. So we support + # it. Because sometimes doing the silly thing is easier than not. 
+ for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_client_switch_proposal(_SWITCH_CONNECT) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + cs.process_event( + SERVER, _response_type_for_switch[server_switch], server_switch + ) + if server_switch is None: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_inconsistent_protocol_switch(): + for client_switches, server_switch in [ + ([], _SWITCH_CONNECT), + ([], _SWITCH_UPGRADE), + ([_SWITCH_UPGRADE], _SWITCH_CONNECT), + ([_SWITCH_CONNECT], _SWITCH_UPGRADE), + ]: + cs = ConnectionState() + for client_switch in client_switches: + cs.process_client_switch_proposal(client_switch) + cs.process_event(CLIENT, Request) + with pytest.raises(LocalProtocolError): + cs.process_event(SERVER, Response, server_switch) + + +def test_ConnectionState_keepalive_protocol_switch_interaction(): + # keep_alive=False + pending_switch_proposals + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # the protocol switch "wins" + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + # but when the server denies the request, keep_alive comes back into play + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY} + + +def test_ConnectionState_reuse(): + cs = ConnectionState() + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + cs.start_next_cycle() + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + # No keepalive + + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # One side closed + + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(CLIENT, ConnectionClosed) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # Succesful protocol switch + + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, InformationalResponse, _SWITCH_UPGRADE) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # Failed protocol switch + + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + cs.start_next_cycle() + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + +def test_server_request_is_illegal(): + # There used to be a bug 
in how we handled the Request special case that + # made this allowed... + cs = ConnectionState() + with pytest.raises(LocalProtocolError): + cs.process_event(SERVER, Request) diff --git a/py3/lib/python3.6/site-packages/h11/tests/test_util.py b/py3/lib/python3.6/site-packages/h11/tests/test_util.py new file mode 100644 index 0000000..74ab33b --- /dev/null +++ b/py3/lib/python3.6/site-packages/h11/tests/test_util.py @@ -0,0 +1,99 @@ +import re +import sys +import traceback + +import pytest + +from .._util import * + + +def test_ProtocolError(): + with pytest.raises(TypeError): + ProtocolError("abstract base class") + + +def test_LocalProtocolError(): + try: + raise LocalProtocolError("foo") + except LocalProtocolError as e: + assert str(e) == "foo" + assert e.error_status_hint == 400 + + try: + raise LocalProtocolError("foo", error_status_hint=418) + except LocalProtocolError as e: + assert str(e) == "foo" + assert e.error_status_hint == 418 + + def thunk(): + raise LocalProtocolError("a", error_status_hint=420) + + try: + try: + thunk() + except LocalProtocolError as exc1: + orig_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) + exc1._reraise_as_remote_protocol_error() + except RemoteProtocolError as exc2: + assert type(exc2) is RemoteProtocolError + assert exc2.args == ("a",) + assert exc2.error_status_hint == 420 + new_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) + assert new_traceback.endswith(orig_traceback) + + +def test_validate(): + my_re = re.compile(br"(?P[0-9]+)\.(?P[0-9]+)") + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.") + + groups = validate(my_re, b"0.1") + assert groups == {"group1": b"0", "group2": b"1"} + + # successful partial matches are an error - must match whole string + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.1xx") + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.1\n") + + +def test_validate_formatting(): + my_re = re.compile(br"foo") + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops") + assert "oops" in str(excinfo.value) + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops {}") + assert "oops {}" in str(excinfo.value) + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops {} xx", 10) + assert "oops 10 xx" in str(excinfo.value) + + +def test_make_sentinel(): + S = make_sentinel("S") + assert repr(S) == "S" + assert S == S + assert type(S).__name__ == "S" + assert S in {S} + assert type(S) is S + S2 = make_sentinel("S2") + assert repr(S2) == "S2" + assert S != S2 + assert S not in {S2} + assert type(S) is not type(S2) + + +def test_bytesify(): + assert bytesify(b"123") == b"123" + assert bytesify(bytearray(b"123")) == b"123" + assert bytesify("123") == b"123" + + with pytest.raises(UnicodeEncodeError): + bytesify(u"\u1234") + + with pytest.raises(TypeError): + bytesify(10) diff --git a/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/LICENSE b/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/LICENSE new file mode 100644 index 0000000..7bb76c5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2016 Cory Benfield and 
contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/METADATA b/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/METADATA new file mode 100644 index 0000000..81983d5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/METADATA @@ -0,0 +1,838 @@ +Metadata-Version: 2.1 +Name: h2 +Version: 3.1.1 +Summary: HTTP/2 State-Machine based protocol implementation +Home-page: http://hyper.rtfd.org +Author: Cory Benfield +Author-email: cory@lukasa.co.uk +License: MIT License +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Dist: hyperframe (<6,>=5.2.0) +Requires-Dist: hpack (<4,>=2.3) +Requires-Dist: enum34 (<2,>=1.1.6) ; python_version == "2.7" or python_version == "3.3" + +=============================== +hyper-h2: HTTP/2 Protocol Stack +=============================== + +.. image:: https://raw.github.com/Lukasa/hyper/development/docs/source/images/hyper.png + +.. image:: https://travis-ci.org/python-hyper/hyper-h2.svg?branch=master + :target: https://travis-ci.org/python-hyper/hyper-h2 + +This repository contains a pure-Python implementation of a HTTP/2 protocol +stack. It's written from the ground up to be embeddable in whatever program you +choose to use, ensuring that you can speak HTTP/2 regardless of your +programming paradigm. + +You use it like this: + +.. code-block:: python + + import h2.connection + + conn = h2.connection.H2Connection() + conn.send_headers(stream_id=stream_id, headers=headers) + conn.send_data(stream_id, data) + socket.sendall(conn.data_to_send()) + events = conn.receive_data(socket_data) + +This repository does not provide a parsing layer, a network layer, or any rules +about concurrency. Instead, it's a purely in-memory solution, defined in terms +of data actions and HTTP/2 frames. 
This is one building block of a full Python +HTTP implementation. + +To install it, just run: + +.. code-block:: console + + $ pip install h2 + +Documentation +============= + +Documentation is available at http://python-hyper.org/h2/. + +Contributing +============ + +``hyper-h2`` welcomes contributions from anyone! Unlike many other projects we +are happy to accept cosmetic contributions and small contributions, in addition +to large feature requests and changes. + +Before you contribute (either by opening an issue or filing a pull request), +please `read the contribution guidelines`_. + +.. _read the contribution guidelines: http://python-hyper.org/en/latest/contributing.html + +License +======= + +``hyper-h2`` is made available under the MIT License. For more details, see the +``LICENSE`` file in the repository. + +Authors +======= + +``hyper-h2`` is maintained by Cory Benfield, with contributions from others. For +more details about the contributors, please see ``CONTRIBUTORS.rst``. + + +Release History +=============== + +3.1.1 (2019-08-02) +------------------ + +Bugfixes +~~~~~~~~ + +- Ignore WINDOW_UPDATE and RST_STREAM frames received after stream + closure. + + +3.1.0 (2019-01-22) +------------------ + +API Changes (Backward-Incompatible) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- ``h2.connection.H2Connection.data_to_send`` first and only argument ``amt`` + was renamed to ``amount``. +- Support for Python 3.3 has been removed. + +API Changes (Backward-Compatible) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- ``h2.connection.H2Connection.send_data`` now supports ``data`` parameter + being a ``memoryview`` object. +- Refactor ping-related events: a ``h2.events.PingReceived`` event is fired + when a PING frame is received and a ``h2.events.PingAckReceived`` event is + fired when a PING frame with an ACK flag is received. + ``h2.events.PingAcknowledged`` is deprecated in favour of the identical + ``h2.events.PingAckReceived``. +- Added ``ENABLE_CONNECT_PROTOCOL`` to ``h2.settings.SettingCodes``. +- Support ``CONNECT`` requests with a ``:protocol`` pseudo header + thereby supporting RFC 8441. +- A limit to the number of closed streams kept in memory by the + connection is applied. It can be configured by + ``h2.connection.H2Connection.MAX_CLOSED_STREAMS``. + +Bugfixes +~~~~~~~~ + +- Debug logging when stream_id is None is now fixed and no longer errors. + +3.0.1 (2017-04-03) +------------------ + +Bugfixes +~~~~~~~~ + +- CONTINUATION frames sent on closed streams previously caused stream errors + of type STREAM_CLOSED. RFC 7540 § 6.10 requires that these be connection + errors of type PROTOCOL_ERROR, and so this release changes to match that + behaviour. +- Remote peers incrementing their inbound connection window beyond the maximum + allowed value now cause stream-level errors, rather than connection-level + errors, allowing connections to stay up longer. +- h2 now rejects receiving and sending request header blocks that are missing + any of the mandatory pseudo-header fields (:path, :scheme, and :method). +- h2 now rejects receiving and sending request header blocks that have an empty + :path pseudo-header value. +- h2 now rejects receiving and sending request header blocks that contain + response-only pseudo-headers, and vice versa. +- h2 now correct respects user-initiated changes to the HEADER_TABLE_SIZE + local setting, and ensures that if users shrink or increase the header + table size it is policed appropriately. 
+ + +2.6.2 (2017-04-03) +------------------ + +Bugfixes +~~~~~~~~ + +- CONTINUATION frames sent on closed streams previously caused stream errors + of type STREAM_CLOSED. RFC 7540 § 6.10 requires that these be connection + errors of type PROTOCOL_ERROR, and so this release changes to match that + behaviour. +- Remote peers incrementing their inbound connection window beyond the maximum + allowed value now cause stream-level errors, rather than connection-level + errors, allowing connections to stay up longer. +- h2 now rejects receiving and sending request header blocks that are missing + any of the mandatory pseudo-header fields (:path, :scheme, and :method). +- h2 now rejects receiving and sending request header blocks that have an empty + :path pseudo-header value. +- h2 now rejects receiving and sending request header blocks that contain + response-only pseudo-headers, and vice versa. +- h2 now correct respects user-initiated changes to the HEADER_TABLE_SIZE + local setting, and ensures that if users shrink or increase the header + table size it is policed appropriately. + + +2.5.4 (2017-04-03) +------------------ + +Bugfixes +~~~~~~~~ + +- CONTINUATION frames sent on closed streams previously caused stream errors + of type STREAM_CLOSED. RFC 7540 § 6.10 requires that these be connection + errors of type PROTOCOL_ERROR, and so this release changes to match that + behaviour. +- Remote peers incrementing their inbound connection window beyond the maximum + allowed value now cause stream-level errors, rather than connection-level + errors, allowing connections to stay up longer. +- h2 now correct respects user-initiated changes to the HEADER_TABLE_SIZE + local setting, and ensures that if users shrink or increase the header + table size it is policed appropriately. + + +3.0.0 (2017-03-24) +------------------ + +API Changes (Backward-Incompatible) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- By default, hyper-h2 now joins together received cookie header fields, per + RFC 7540 Section 8.1.2.5. +- Added a ``normalize_inbound_headers`` flag to the ``H2Configuration`` object + that defaults to ``True``. Setting this to ``False`` changes the behaviour + from the previous point back to the v2 behaviour. +- Removed deprecated fields from ``h2.errors`` module. +- Removed deprecated fields from ``h2.settings`` module. +- Removed deprecated ``client_side`` and ``header_encoding`` arguments from + ``H2Connection``. +- Removed deprecated ``client_side`` and ``header_encoding`` properties from + ``H2Connection``. +- ``dict`` objects are no longer allowed for user-supplied headers. +- The default header encoding is now ``None``, not ``utf-8``: this means that + all events that carry headers now return those headers as byte strings by + default. The header encoding can be set back to ``utf-8`` to restore the old + behaviour. + +API Changes (Backward-Compatible) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Added new ``UnknownFrameReceived`` event that fires when unknown extension + frames have been received. This only fires when using hyperframe 5.0 or + later: earlier versions of hyperframe cause us to silently ignore extension + frames. + +Bugfixes +~~~~~~~~ + +None + + +2.6.1 (2017-03-16) +------------------ + +Bugfixes +~~~~~~~~ + +- Allowed hyperframe v5 support while continuing to ignore unexpected frames. + + +2.5.3 (2017-03-16) +------------------ + +Bugfixes +~~~~~~~~ + +- Allowed hyperframe v5 support while continuing to ignore unexpected frames. 
+ + +2.4.4 (2017-03-16) +------------------ + +Bugfixes +~~~~~~~~ + +- Allowed hyperframe v5 support while continuing to ignore unexpected frames. + + +2.6.0 (2017-02-28) +------------------ + +API Changes (Backward-Compatible) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Added a new ``h2.events.Event`` class that acts as a base class for all + events. +- Rather than reject outbound Connection-specific headers, h2 will now + normalize the header block by removing them. +- Implement equality for the ``h2.settings.Settings`` class. +- Added ``h2.settings.SettingCodes``, an enum that is used to store all the + HTTP/2 setting codes. This allows us to use a better printed representation of + the setting code in most places that it is used. +- The ``setting`` field in ``ChangedSetting`` for the ``RemoteSettingsChanged`` + and ``SettingsAcknowledged`` events has been updated to be instances of + ``SettingCodes`` whenever they correspond to a known setting code. When they + are an unknown setting code, they are instead ``int``. As ``SettingCodes`` is + a subclass of ``int``, this is non-breaking. +- Deprecated the other fields in ``h2.settings``. These will be removed in + 3.0.0. +- Added an optional ``pad_length`` parameter to ``H2Connection.send_data`` + to allow the user to include padding on a data frame. +- Added a new parameter to the ``h2.config.H2Configuration`` initializer which + takes a logger. This allows us to log by providing a logger that conforms + to the requirements of this module so that it can be used in different + environments. + +Bugfixes +~~~~~~~~ + +- Correctly reject pushed request header blocks whenever they have malformed + request header blocks. +- Correctly normalize pushed request header blocks whenever they have + normalizable header fields. +- Remote peers are now allowed to send zero or any positive number as a value + for ``SETTINGS_MAX_HEADER_LIST_SIZE``, where previously sending zero would + raise a ``InvalidSettingsValueError``. +- Resolved issue where the ``HTTP2-Settings`` header value for plaintext + upgrade that was emitted by ``initiate_upgrade_connection`` included the + *entire* ``SETTINGS`` frame, instead of just the payload. +- Resolved issue where the ``HTTP2-Settings`` header value sent by a client for + plaintext upgrade would be ignored by ``initiate_upgrade_connection``, rather + than have those settings applied appropriately. +- Resolved an issue whereby certain frames received from a peer in the CLOSED + state would trigger connection errors when RFC 7540 says they should have + triggered stream errors instead. Added more detailed stream closure tracking + to ensure we don't throw away connections unnecessarily. + + +2.5.2 (2017-01-27) +------------------ + +- Resolved issue where the ``HTTP2-Settings`` header value for plaintext + upgrade that was emitted by ``initiate_upgrade_connection`` included the + *entire* ``SETTINGS`` frame, instead of just the payload. +- Resolved issue where the ``HTTP2-Settings`` header value sent by a client for + plaintext upgrade would be ignored by ``initiate_upgrade_connection``, rather + than have those settings applied appropriately. + + +2.4.3 (2017-01-27) +------------------ + +- Resolved issue where the ``HTTP2-Settings`` header value for plaintext + upgrade that was emitted by ``initiate_upgrade_connection`` included the + *entire* ``SETTINGS`` frame, instead of just the payload. 
+- Resolved issue where the ``HTTP2-Settings`` header value sent by a client for + plaintext upgrade would be ignored by ``initiate_upgrade_connection``, rather + than have those settings applied appropriately. + + +2.3.4 (2017-01-27) +------------------ + +- Resolved issue where the ``HTTP2-Settings`` header value for plaintext + upgrade that was emitted by ``initiate_upgrade_connection`` included the + *entire* ``SETTINGS`` frame, instead of just the payload. +- Resolved issue where the ``HTTP2-Settings`` header value sent by a client for + plaintext upgrade would be ignored by ``initiate_upgrade_connection``, rather + than have those settings applied appropriately. + + +2.5.1 (2016-12-17) +------------------ + +Bugfixes +~~~~~~~~ + +- Remote peers are now allowed to send zero or any positive number as a value + for ``SETTINGS_MAX_HEADER_LIST_SIZE``, where previously sending zero would + raise a ``InvalidSettingsValueError``. + + +2.5.0 (2016-10-25) +------------------ + +API Changes (Backward-Compatible) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Added a new ``H2Configuration`` object that allows rich configuration of + a ``H2Connection``. This object supersedes the prior keyword arguments to the + ``H2Connection`` object, which are now deprecated and will be removed in 3.0. +- Added support for automated window management via the + ``acknowledge_received_data`` method. See the documentation for more details. +- Added a ``DenialOfServiceError`` that is raised whenever a behaviour that + looks like a DoS attempt is encountered: for example, an overly large + decompressed header list. This is a subclass of ``ProtocolError``. +- Added support for setting and managing ``SETTINGS_MAX_HEADER_LIST_SIZE``. + This setting is now defaulted to 64kB. +- Added ``h2.errors.ErrorCodes``, an enum that is used to store all the HTTP/2 + error codes. This allows us to use a better printed representation of the + error code in most places that it is used. +- The ``error_code`` fields on ``ConnectionTerminated`` and ``StreamReset`` + events have been updated to be instances of ``ErrorCodes`` whenever they + correspond to a known error code. When they are an unknown error code, they + are instead ``int``. As ``ErrorCodes`` is a subclass of ``int``, this is + non-breaking. +- Deprecated the other fields in ``h2.errors``. These will be removed in 3.0.0. + +Bugfixes +~~~~~~~~ + +- Correctly reject request header blocks with neither :authority nor Host + headers, or header blocks which contain mismatched :authority and Host + headers, per RFC 7540 Section 8.1.2.3. +- Correctly expect that responses to HEAD requests will have no body regardless + of the value of the Content-Length header, and reject those that do. +- Correctly refuse to send header blocks that contain neither :authority nor + Host headers, or header blocks which contain mismatched :authority and Host + headers, per RFC 7540 Section 8.1.2.3. +- Hyper-h2 will now reject header field names and values that contain leading + or trailing whitespace. +- Correctly strip leading/trailing whitespace from header field names and + values. +- Correctly refuse to send header blocks with a TE header whose value is not + ``trailers``, per RFC 7540 Section 8.1.2.2. +- Correctly refuse to send header blocks with connection-specific headers, + per RFC 7540 Section 8.1.2.2. +- Correctly refuse to send header blocks that contain duplicate pseudo-header + fields, or with pseudo-header fields that appear after ordinary header fields, + per RFC 7540 Section 8.1.2.1. 
+ + This may cause passing a dictionary as the header block to ``send_headers`` + to throw a ``ProtocolError``, because dictionaries are unordered and so they + may trip this check. Passing dictionaries here is deprecated, and callers + should change to using a sequence of 2-tuples as their header blocks. +- Correctly reject trailers that contain HTTP/2 pseudo-header fields, per RFC + 7540 Section 8.1.2.1. +- Correctly refuse to send trailers that contain HTTP/2 pseudo-header fields, + per RFC 7540 Section 8.1.2.1. +- Correctly reject responses that do not contain the ``:status`` header field, + per RFC 7540 Section 8.1.2.4. +- Correctly refuse to send responses that do not contain the ``:status`` header + field, per RFC 7540 Section 8.1.2.4. +- Correctly update the maximum frame size when the user updates the value of + that setting. Prior to this release, if the user updated the maximum frame + size hyper-h2 would ignore the update, preventing the remote peer from using + the higher frame sizes. + +2.4.2 (2016-10-25) +------------------ + +Bugfixes +~~~~~~~~ + +- Correctly update the maximum frame size when the user updates the value of + that setting. Prior to this release, if the user updated the maximum frame + size hyper-h2 would ignore the update, preventing the remote peer from using + the higher frame sizes. + +2.3.3 (2016-10-25) +------------------ + +Bugfixes +~~~~~~~~ + +- Correctly update the maximum frame size when the user updates the value of + that setting. Prior to this release, if the user updated the maximum frame + size hyper-h2 would ignore the update, preventing the remote peer from using + the higher frame sizes. + +2.2.7 (2016-10-25) +------------------ + +*Final 2.2.X release* + +Bugfixes +~~~~~~~~ + +- Correctly update the maximum frame size when the user updates the value of + that setting. Prior to this release, if the user updated the maximum frame + size hyper-h2 would ignore the update, preventing the remote peer from using + the higher frame sizes. + +2.4.1 (2016-08-23) +------------------ + +Bugfixes +~~~~~~~~ + +- Correctly expect that responses to HEAD requests will have no body regardless + of the value of the Content-Length header, and reject those that do. + +2.3.2 (2016-08-23) +------------------ + +Bugfixes +~~~~~~~~ + +- Correctly expect that responses to HEAD requests will have no body regardless + of the value of the Content-Length header, and reject those that do. + +2.4.0 (2016-07-01) +------------------ + +API Changes (Backward-Compatible) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Adds ``additional_data`` to ``H2Connection.close_connection``, allowing the + user to send additional debug data on the GOAWAY frame. +- Adds ``last_stream_id`` to ``H2Connection.close_connection``, allowing the + user to manually control what the reported last stream ID is. +- Add new method: ``prioritize``. +- Add support for emitting stream priority information when sending headers + frames using three new keyword arguments: ``priority_weight``, + ``priority_depends_on``, and ``priority_exclusive``. +- Add support for "related events": events that fire simultaneously on a single + frame. + + +2.3.1 (2016-05-12) +------------------ + +Bugfixes +~~~~~~~~ + +- Resolved ``AttributeError`` encountered when receiving more than one sequence + of CONTINUATION frames on a given connection. 
+ + +2.2.5 (2016-05-12) +------------------ + +Bugfixes +~~~~~~~~ + +- Resolved ``AttributeError`` encountered when receiving more than one sequence + of CONTINUATION frames on a given connection. + + +2.3.0 (2016-04-26) +------------------ + +API Changes (Backward-Compatible) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Added a new flag to the ``H2Connection`` constructor: ``header_encoding``, + that controls what encoding is used (if any) to decode the headers from bytes + to unicode. This defaults to UTF-8 for backward compatibility. To disable the + decode and use bytes exclusively, set the field to False, None, or the empty + string. This affects all headers, including those pushed by servers. +- Bumped the minimum version of HPACK allowed from 2.0 to 2.2. +- Added support for advertising RFC 7838 Alternative services. +- Allowed users to provide ``hpack.HeaderTuple`` and + ``hpack.NeverIndexedHeaderTuple`` objects to all methods that send headers. +- Changed all events that carry headers to emit ``hpack.HeaderTuple`` and + ``hpack.NeverIndexedHeaderTuple`` instead of plain tuples. This allows users + to maintain header indexing state. +- Added support for plaintext upgrade with the ``initiate_upgrade_connection`` + method. + +Bugfixes +~~~~~~~~ + +- Automatically ensure that all ``Authorization`` and ``Proxy-Authorization`` + headers, as well as short ``Cookie`` headers, are prevented from being added + to encoding contexts. + +2.2.4 (2016-04-25) +------------------ + +Bugfixes +~~~~~~~~ + +- Correctly forbid pseudo-headers that were not defined in RFC 7540. +- Ignore AltSvc frames, rather than exploding when receiving them. + +2.1.5 (2016-04-25) +------------------ + +*Final 2.1.X release* + +Bugfixes +~~~~~~~~ + +- Correctly forbid pseudo-headers that were not defined in RFC 7540. +- Ignore AltSvc frames, rather than exploding when receiving them. + +2.2.3 (2016-04-13) +------------------ + +Bugfixes +~~~~~~~~ + +- Allowed the 4.X series of hyperframe releases as dependencies. + +2.1.4 (2016-04-13) +------------------ + +Bugfixes +~~~~~~~~ + +- Allowed the 4.X series of hyperframe releases as dependencies. + + +2.2.2 (2016-04-05) +------------------ + +Bugfixes +~~~~~~~~ + +- Fixed issue where informational responses were erroneously not allowed to be + sent in the ``HALF_CLOSED_REMOTE`` state. +- Fixed issue where informational responses were erroneously not allowed to be + received in the ``HALF_CLOSED_LOCAL`` state. +- Fixed issue where we allowed information responses to be sent or received + after final responses. + +2.2.1 (2016-03-23) +------------------ + +Bugfixes +~~~~~~~~ + +- Fixed issue where users using locales that did not default to UTF-8 were + unable to install source distributions of the package. + +2.2.0 (2016-03-23) +------------------ + +API Changes (Backward-Compatible) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Added support for sending informational responses (responses with 1XX status) + codes as part of the standard flow. HTTP/2 allows zero or more informational + responses with no upper limit: hyper-h2 does too. +- Added support for receiving informational responses (responses with 1XX + status) codes as part of the standard flow. HTTP/2 allows zero or more + informational responses with no upper limit: hyper-h2 does too. +- Added a new event: ``ReceivedInformationalResponse``. This response is fired + when informational responses (those with 1XX status codes). 
+- Added an ``additional_data`` field to the ``ConnectionTerminated`` event that + carries any additional data sent on the GOAWAY frame. May be ``None`` if no + such data was sent. +- Added the ``initial_values`` optional argument to the ``Settings`` object. + +Bugfixes +~~~~~~~~ + +- Correctly reject all of the connection-specific headers mentioned in RFC 7540 + § 8.1.2.2, not just the ``Connection:`` header. +- Defaulted the value of ``SETTINGS_MAX_CONCURRENT_STREAMS`` to 100, unless + explicitly overridden. This is a safe defensive initial value for this + setting. + +2.1.3 (2016-03-16) +------------------ + +Deprecations +~~~~~~~~~~~~ + +- Passing dictionaries to ``send_headers`` as the header block is deprecated, + and will be removed in 3.0. + +2.1.2 (2016-02-17) +------------------ + +Bugfixes +~~~~~~~~ + +- Reject attempts to push streams on streams that were themselves pushed: + streams can only be pushed on streams that were initiated by the client. +- Correctly allow CONTINUATION frames to extend the header block started by a + PUSH_PROMISE frame. +- Changed our handling of frames received on streams that were reset by the + user. + + Previously these would, at best, cause ProtocolErrors to be raised and the + connection to be torn down (rather defeating the point of resetting streams + at all) and, at worst, would cause subtle inconsistencies in state between + hyper-h2 and the remote peer that could lead to header block decoding errors + or flow control blockages. + + Now when the user resets a stream all further frames received on that stream + are ignored except where they affect some form of connection-level state, + where they have their effect and are then ignored. +- Fixed a bug whereby receiving a PUSH_PROMISE frame on a stream that was + closed would cause a RST_STREAM frame to be emitted on the closed-stream, + but not the newly-pushed one. Now this causes a ``ProtocolError``. + +2.1.1 (2016-02-05) +------------------ + +Bugfixes +~~~~~~~~ + +- Added debug representations for all events. +- Fixed problems with setup.py that caused trouble on older setuptools/pip + installs. + +2.1.0 (2016-02-02) +------------------ + +API Changes (Backward-Compatible) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Added new field to ``DataReceived``: ``flow_controlled_length``. This is the + length of the frame including padded data, allowing users to correctly track + changes to the flow control window. +- Defined new ``UnsupportedFrameError``, thrown when frames that are known to + hyperframe but not supported by hyper-h2 are received. For + backward-compatibility reasons, this is a ``ProtocolError`` *and* a + ``KeyError``. + +Bugfixes +~~~~~~~~ + +- Hyper-h2 now correctly accounts for padding when maintaining flow control + windows. +- Resolved a bug where hyper-h2 would mistakenly apply + SETTINGS_INITIAL_WINDOW_SIZE to the connection flow control window in + addition to the stream-level flow control windows. +- Invalid Content-Length headers now throw ``ProtocolError`` exceptions and + correctly tear the connection down, instead of leaving the connection in an + indeterminate state. +- Invalid header blocks now throw ``ProtocolError``, rather than a grab bag of + possible other exceptions. + +2.0.0 (2016-01-25) +------------------ + +API Changes (Breaking) +~~~~~~~~~~~~~~~~~~~~~~ + +- Attempts to open streams with invalid stream IDs, either by the remote peer + or by the user, are now rejected as a ``ProtocolError``. 
Previously these + were allowed, and would cause remote peers to error. +- Receiving frames that have invalid padding now causes the connection to be + terminated with a ``ProtocolError`` being raised. Previously these passed + undetected. +- Settings values set by both the user and the remote peer are now validated + when they're set. If they're invalid, a new ``InvalidSettingsValueError`` is + raised and, if set by the remote peer, a connection error is signaled. + Previously, it was possible to set invalid values. These would either be + caught when building frames, or would be allowed to stand. +- Settings changes no longer require user action to be acknowledged: hyper-h2 + acknowledges them automatically. This moves the location where some + exceptions may be thrown, and also causes the ``acknowledge_settings`` method + to be removed from the public API. +- Removed a number of methods on the ``H2Connection`` object from the public, + semantically versioned API, by renaming them to have leading underscores. + Specifically, removed: + + - ``get_stream_by_id`` + - ``get_or_create_stream`` + - ``begin_new_stream`` + - ``receive_frame`` + - ``acknowledge_settings`` + +- Added full support for receiving CONTINUATION frames, including policing + logic about when and how they are received. Previously, receiving + CONTINUATION frames was not supported and would throw exceptions. +- All public API functions on ``H2Connection`` except for ``receive_data`` no + longer return lists of events, because these lists were always empty. Events + are now only raised by ``receive_data``. +- Calls to ``increment_flow_control_window`` with out of range values now raise + ``ValueError`` exceptions. Previously they would be allowed, or would cause + errors when serializing frames. + +API Changes (Backward-Compatible) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Added ``PriorityUpdated`` event for signaling priority changes. +- Added ``get_next_available_stream_id`` function. +- Receiving DATA frames on streams not in the OPEN or HALF_CLOSED_LOCAL states + now causes a stream reset, rather than a connection reset. The error is now + also classified as a ``StreamClosedError``, rather than a more generic + ``ProtocolError``. +- Receiving HEADERS or PUSH_PROMISE frames in the HALF_CLOSED_REMOTE state now + causes a stream reset, rather than a connection reset. +- Receiving frames that violate the max frame size now causes connection errors + with error code FRAME_SIZE_ERROR, not a generic PROTOCOL_ERROR. This + condition now also raises a ``FrameTooLargeError``, a new subclass of + ``ProtocolError``. +- Made ``NoSuchStreamError`` a subclass of ``ProtocolError``. +- The ``StreamReset`` event is now also fired whenever a protocol error from + the remote peer forces a stream to close early. This is only fired once. +- The ``StreamReset`` event now carries a flag, ``remote_reset``, that is set + to ``True`` in all cases where ``StreamReset`` would previously have fired + (e.g. when the remote peer sent a RST_STREAM), and is set to ``False`` when + it fires because the remote peer made a protocol error. +- Hyper-h2 now rejects attempts by peers to increment a flow control window by + zero bytes. +- Hyper-h2 now rejects peers sending header blocks that are ill-formed for a + number of reasons as set out in RFC 7540 Section 8.1.2. +- Attempting to send non-PRIORITY frames on closed streams now raises + ``StreamClosedError``. 
+- Remote peers attempting to increase the flow control window beyond + ``2**31 - 1``, either by window increment or by settings frame, are now + rejected as ``ProtocolError``. +- Local attempts to increase the flow control window beyond ``2**31 - 1`` by + window increment are now rejected as ``ProtocolError``. +- The bytes that represent individual settings are now available in + ``h2.settings``, instead of needing users to import them from hyperframe. + +Bugfixes +~~~~~~~~ + +- RFC 7540 requires that a separate minimum stream ID be used for inbound and + outbound streams. Hyper-h2 now obeys this requirement. +- Hyper-h2 now does a better job of reporting the last stream ID it has + partially handled when terminating connections. +- Fixed an error in the arguments of ``StreamIDTooLowError``. +- Prevent ``ValueError`` leaking from Hyperframe. +- Prevent ``struct.error`` and ``InvalidFrameError`` leaking from Hyperframe. + +1.1.1 (2015-11-17) +------------------ + +Bugfixes +~~~~~~~~ + +- Forcibly lowercase all header names to improve compatibility with + implementations that demand lower-case header names. + +1.1.0 (2015-10-28) +------------------ + +API Changes (Backward-Compatible) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Added a new ``ConnectionTerminated`` event, which fires when GOAWAY frames + are received. +- Added a subclass of ``NoSuchStreamError``, called ``StreamClosedError``, that + fires when actions are taken on a stream that is closed and has had its state + flushed from the system. +- Added ``StreamIDTooLowError``, raised when the user or the remote peer + attempts to create a stream with an ID lower than one previously used in the + dialog. Inherits from ``ValueError`` for backward-compatibility reasons. + +Bugfixes +~~~~~~~~ + +- Do not throw ``ProtocolError`` when attempting to send multiple GOAWAY + frames on one connection. +- We no longer forcefully change the decoder table size when settings changes + are ACKed, instead waiting for remote acknowledgement of the change. +- Improve the performance of checking whether a stream is open. +- We now attempt to lazily garbage collect closed streams, to avoid having the + state hang around indefinitely, leaking memory. +- Avoid further per-stream allocations, leading to substantial performance + improvements when many short-lived streams are used. + +1.0.0 (2015-10-15) +------------------ + +- First production release! 
+ + diff --git a/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/RECORD b/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/RECORD new file mode 100644 index 0000000..da7feac --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/RECORD @@ -0,0 +1,28 @@ +h2/__init__.py,sha256=z6fp72Js9JmPRUuM0ogdmz-Yz0qSCwxsziO19EmK_Bg,86 +h2/config.py,sha256=yUt4qqDqe7wtpsWUmsScDhWCoJC7pV5DIrIjkHbPoY0,6498 +h2/connection.py,sha256=QKga32XTvB-wIZc9hQLmuJeG0e1xHtDy7lDu6cytv5Y,81545 +h2/errors.py,sha256=zVc5DO6IBAv1EkJxdb-mpRdqxUBfb5W0gwdqBLpqy80,1553 +h2/events.py,sha256=_zttawGqPVmEVeZN1S3kGREQ-VSt1xP07SebUxLqAF4,21962 +h2/exceptions.py,sha256=rdeQYMyQ46s6gkRkkb02YXsKZnc1VFuPqpPKr8Q-8a8,5379 +h2/frame_buffer.py,sha256=NBeQ94joXLE9XkGpfGFeB_VIOBNq_9fINy90YUG0qD4,6771 +h2/settings.py,sha256=hUohNoWHMouGhOWiDc2r5sceqMGTuVsDpi9KmsPCDMY,11814 +h2/stream.py,sha256=AL5KGnEZFKaLp2JtKw-XEEInJVccKmgGjOjM2ay12W4,54447 +h2/utilities.py,sha256=JI6DcBWD0rXiAafbCfnXmroYK1Q28g4bqHLqeqA29_4,22818 +h2/windows.py,sha256=XRuu1Y2WlTq6TuEpuhfvl5R6TIGysoU9s0mpFVgjIUA,5603 +h2-3.1.1.dist-info/LICENSE,sha256=Zj-SU-E1GbgqtKaxyqtr7QWq2nBRfyjNAlS-ip-hntY,1102 +h2-3.1.1.dist-info/METADATA,sha256=cUY0Id4rwVixaFgr67opavhBwUltltA95V63QGXfYHs,31736 +h2-3.1.1.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 +h2-3.1.1.dist-info/top_level.txt,sha256=Hiulx8KxI2jFUM1dG7-CZeRkO3j50MBwCLG36Vrq-kI,3 +h2-3.1.1.dist-info/RECORD,, +h2-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +h2/__pycache__/events.cpython-36.pyc,, +h2/__pycache__/config.cpython-36.pyc,, +h2/__pycache__/utilities.cpython-36.pyc,, +h2/__pycache__/settings.cpython-36.pyc,, +h2/__pycache__/frame_buffer.cpython-36.pyc,, +h2/__pycache__/errors.cpython-36.pyc,, +h2/__pycache__/__init__.cpython-36.pyc,, +h2/__pycache__/connection.cpython-36.pyc,, +h2/__pycache__/stream.cpython-36.pyc,, +h2/__pycache__/windows.cpython-36.pyc,, +h2/__pycache__/exceptions.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/WHEEL b/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/WHEEL new file mode 100644 index 0000000..78e6f69 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.4) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/top_level.txt new file mode 100644 index 0000000..c48b563 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2-3.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +h2 diff --git a/py3/lib/python3.6/site-packages/h2/__init__.py b/py3/lib/python3.6/site-packages/h2/__init__.py new file mode 100644 index 0000000..cb4d24b --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2/__init__.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +""" +h2 +~~ + +A HTTP/2 implementation. +""" +__version__ = '3.1.1' diff --git a/py3/lib/python3.6/site-packages/h2/config.py b/py3/lib/python3.6/site-packages/h2/config.py new file mode 100644 index 0000000..08129a4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2/config.py @@ -0,0 +1,164 @@ +# -*- coding: utf-8 -*- +""" +h2/config +~~~~~~~~~ + +Objects for controlling the configuration of the HTTP/2 stack. +""" + + +class _BooleanConfigOption(object): + """ + Descriptor for handling a boolean config option. This will block + attempts to set boolean config options to non-bools. 
+ """ + def __init__(self, name): + self.name = name + self.attr_name = '_%s' % self.name + + def __get__(self, instance, owner): + return getattr(instance, self.attr_name) + + def __set__(self, instance, value): + if not isinstance(value, bool): + raise ValueError("%s must be a bool" % self.name) + setattr(instance, self.attr_name, value) + + +class DummyLogger(object): + """ + An Logger object that does not actual logging, hence a DummyLogger. + + For the class the log operation is merely a no-op. The intent is to avoid + conditionals being sprinkled throughout the hyper-h2 code for calls to + logging functions when no logger is passed into the corresponding object. + """ + def __init__(self, *vargs): + pass + + def debug(self, *vargs, **kwargs): + """ + No-op logging. Only level needed for now. + """ + pass + + +class H2Configuration(object): + """ + An object that controls the way a single HTTP/2 connection behaves. + + This object allows the users to customize behaviour. In particular, it + allows users to enable or disable optional features, or to otherwise handle + various unusual behaviours. + + This object has very little behaviour of its own: it mostly just ensures + that configuration is self-consistent. + + :param client_side: Whether this object is to be used on the client side of + a connection, or on the server side. Affects the logic used by the + state machine, the default settings values, the allowable stream IDs, + and several other properties. Defaults to ``True``. + :type client_side: ``bool`` + + :param header_encoding: Controls whether the headers emitted by this object + in events are transparently decoded to ``unicode`` strings, and what + encoding is used to do that decoding. This defaults to ``None``, + meaning that headers will be returned as bytes. To automatically + decode headers (that is, to return them as unicode strings), this can + be set to the string name of any encoding, e.g. ``'utf-8'``. + + .. versionchanged:: 3.0.0 + Changed default value from ``'utf-8'`` to ``None`` + + :type header_encoding: ``str``, ``False``, or ``None`` + + :param validate_outbound_headers: Controls whether the headers emitted + by this object are validated against the rules in RFC 7540. + Disabling this setting will cause outbound header validation to + be skipped, and allow the object to emit headers that may be illegal + according to RFC 7540. Defaults to ``True``. + :type validate_outbound_headers: ``bool`` + + :param normalize_outbound_headers: Controls whether the headers emitted + by this object are normalized before sending. Disabling this setting + will cause outbound header normalization to be skipped, and allow + the object to emit headers that may be illegal according to + RFC 7540. Defaults to ``True``. + :type normalize_outbound_headers: ``bool`` + + :param validate_inbound_headers: Controls whether the headers received + by this object are validated against the rules in RFC 7540. + Disabling this setting will cause inbound header validation to + be skipped, and allow the object to receive headers that may be illegal + according to RFC 7540. Defaults to ``True``. + :type validate_inbound_headers: ``bool`` + + :param normalize_inbound_headers: Controls whether the headers received by + this object are normalized according to the rules of RFC 7540. + Disabling this setting may lead to hyper-h2 emitting header blocks that + some RFCs forbid, e.g. with multiple cookie fields. + + .. 
versionadded:: 3.0.0 + + :type normalize_inbound_headers: ``bool`` + + :param logger: A logger that conforms to the requirements for this module, + those being no I/O and no context switches, which is needed in order + to run in asynchronous operation. + + .. versionadded:: 2.6.0 + + :type logger: ``logging.Logger`` + """ + client_side = _BooleanConfigOption('client_side') + validate_outbound_headers = _BooleanConfigOption( + 'validate_outbound_headers' + ) + normalize_outbound_headers = _BooleanConfigOption( + 'normalize_outbound_headers' + ) + validate_inbound_headers = _BooleanConfigOption( + 'validate_inbound_headers' + ) + normalize_inbound_headers = _BooleanConfigOption( + 'normalize_inbound_headers' + ) + + def __init__(self, + client_side=True, + header_encoding=None, + validate_outbound_headers=True, + normalize_outbound_headers=True, + validate_inbound_headers=True, + normalize_inbound_headers=True, + logger=None): + self.client_side = client_side + self.header_encoding = header_encoding + self.validate_outbound_headers = validate_outbound_headers + self.normalize_outbound_headers = normalize_outbound_headers + self.validate_inbound_headers = validate_inbound_headers + self.normalize_inbound_headers = normalize_inbound_headers + self.logger = logger or DummyLogger(__name__) + + @property + def header_encoding(self): + """ + Controls whether the headers emitted by this object in events are + transparently decoded to ``unicode`` strings, and what encoding is used + to do that decoding. This defaults to ``None``, meaning that headers + will be returned as bytes. To automatically decode headers (that is, to + return them as unicode strings), this can be set to the string name of + any encoding, e.g. ``'utf-8'``. + """ + return self._header_encoding + + @header_encoding.setter + def header_encoding(self, value): + """ + Enforces constraints on the value of header encoding. + """ + if not isinstance(value, (bool, str, type(None))): + raise ValueError("header_encoding must be bool, string, or None") + if value is True: + raise ValueError("header_encoding cannot be True") + self._header_encoding = value diff --git a/py3/lib/python3.6/site-packages/h2/connection.py b/py3/lib/python3.6/site-packages/h2/connection.py new file mode 100644 index 0000000..190c7fc --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2/connection.py @@ -0,0 +1,2012 @@ +# -*- coding: utf-8 -*- +""" +h2/connection +~~~~~~~~~~~~~ + +An implementation of a HTTP/2 connection. 
+""" +import base64 + +from enum import Enum, IntEnum + +from hyperframe.exceptions import InvalidPaddingError +from hyperframe.frame import ( + GoAwayFrame, WindowUpdateFrame, HeadersFrame, DataFrame, PingFrame, + PushPromiseFrame, SettingsFrame, RstStreamFrame, PriorityFrame, + ContinuationFrame, AltSvcFrame, ExtensionFrame +) +from hpack.hpack import Encoder, Decoder +from hpack.exceptions import HPACKError, OversizedHeaderListError + +from .config import H2Configuration +from .errors import ErrorCodes, _error_code_from_int +from .events import ( + WindowUpdated, RemoteSettingsChanged, PingReceived, PingAckReceived, + SettingsAcknowledged, ConnectionTerminated, PriorityUpdated, + AlternativeServiceAvailable, UnknownFrameReceived +) +from .exceptions import ( + ProtocolError, NoSuchStreamError, FlowControlError, FrameTooLargeError, + TooManyStreamsError, StreamClosedError, StreamIDTooLowError, + NoAvailableStreamIDError, RFC1122Error, DenialOfServiceError +) +from .frame_buffer import FrameBuffer +from .settings import Settings, SettingCodes +from .stream import H2Stream, StreamClosedBy +from .utilities import SizeLimitDict, guard_increment_window +from .windows import WindowManager + + +class ConnectionState(Enum): + IDLE = 0 + CLIENT_OPEN = 1 + SERVER_OPEN = 2 + CLOSED = 3 + + +class ConnectionInputs(Enum): + SEND_HEADERS = 0 + SEND_PUSH_PROMISE = 1 + SEND_DATA = 2 + SEND_GOAWAY = 3 + SEND_WINDOW_UPDATE = 4 + SEND_PING = 5 + SEND_SETTINGS = 6 + SEND_RST_STREAM = 7 + SEND_PRIORITY = 8 + RECV_HEADERS = 9 + RECV_PUSH_PROMISE = 10 + RECV_DATA = 11 + RECV_GOAWAY = 12 + RECV_WINDOW_UPDATE = 13 + RECV_PING = 14 + RECV_SETTINGS = 15 + RECV_RST_STREAM = 16 + RECV_PRIORITY = 17 + SEND_ALTERNATIVE_SERVICE = 18 # Added in 2.3.0 + RECV_ALTERNATIVE_SERVICE = 19 # Added in 2.3.0 + + +class AllowedStreamIDs(IntEnum): + EVEN = 0 + ODD = 1 + + +class H2ConnectionStateMachine(object): + """ + A single HTTP/2 connection state machine. + + This state machine, while defined in its own class, is logically part of + the H2Connection class also defined in this file. The state machine itself + maintains very little state directly, instead focusing entirely on managing + state transitions. + """ + # For the purposes of this state machine we treat HEADERS and their + # associated CONTINUATION frames as a single jumbo frame. The protocol + # allows/requires this by preventing other frames from being interleved in + # between HEADERS/CONTINUATION frames. + # + # The _transitions dictionary contains a mapping of tuples of + # (state, input) to tuples of (side_effect_function, end_state). This map + # contains all allowed transitions: anything not in this map is invalid + # and immediately causes a transition to ``closed``. 
+ + _transitions = { + # State: idle + (ConnectionState.IDLE, ConnectionInputs.SEND_HEADERS): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.IDLE, ConnectionInputs.RECV_HEADERS): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.IDLE, ConnectionInputs.SEND_SETTINGS): + (None, ConnectionState.IDLE), + (ConnectionState.IDLE, ConnectionInputs.RECV_SETTINGS): + (None, ConnectionState.IDLE), + (ConnectionState.IDLE, ConnectionInputs.SEND_WINDOW_UPDATE): + (None, ConnectionState.IDLE), + (ConnectionState.IDLE, ConnectionInputs.RECV_WINDOW_UPDATE): + (None, ConnectionState.IDLE), + (ConnectionState.IDLE, ConnectionInputs.SEND_PING): + (None, ConnectionState.IDLE), + (ConnectionState.IDLE, ConnectionInputs.RECV_PING): + (None, ConnectionState.IDLE), + (ConnectionState.IDLE, ConnectionInputs.SEND_GOAWAY): + (None, ConnectionState.CLOSED), + (ConnectionState.IDLE, ConnectionInputs.RECV_GOAWAY): + (None, ConnectionState.CLOSED), + (ConnectionState.IDLE, ConnectionInputs.SEND_PRIORITY): + (None, ConnectionState.IDLE), + (ConnectionState.IDLE, ConnectionInputs.RECV_PRIORITY): + (None, ConnectionState.IDLE), + (ConnectionState.IDLE, ConnectionInputs.SEND_ALTERNATIVE_SERVICE): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.IDLE, ConnectionInputs.RECV_ALTERNATIVE_SERVICE): + (None, ConnectionState.CLIENT_OPEN), + + # State: open, client side. + (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_HEADERS): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_DATA): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_GOAWAY): + (None, ConnectionState.CLOSED), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_WINDOW_UPDATE): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_PING): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_SETTINGS): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_PRIORITY): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_HEADERS): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_PUSH_PROMISE): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_DATA): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_GOAWAY): + (None, ConnectionState.CLOSED), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_WINDOW_UPDATE): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_PING): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_SETTINGS): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_RST_STREAM): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_RST_STREAM): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_PRIORITY): + (None, ConnectionState.CLIENT_OPEN), + (ConnectionState.CLIENT_OPEN, + ConnectionInputs.RECV_ALTERNATIVE_SERVICE): + (None, ConnectionState.CLIENT_OPEN), + + # State: open, server side. 
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_HEADERS): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_PUSH_PROMISE): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_DATA): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_GOAWAY): + (None, ConnectionState.CLOSED), + (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_WINDOW_UPDATE): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_PING): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_SETTINGS): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_PRIORITY): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_HEADERS): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_DATA): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_GOAWAY): + (None, ConnectionState.CLOSED), + (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_WINDOW_UPDATE): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_PING): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_SETTINGS): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_PRIORITY): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_RST_STREAM): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_RST_STREAM): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, + ConnectionInputs.SEND_ALTERNATIVE_SERVICE): + (None, ConnectionState.SERVER_OPEN), + (ConnectionState.SERVER_OPEN, + ConnectionInputs.RECV_ALTERNATIVE_SERVICE): + (None, ConnectionState.SERVER_OPEN), + + # State: closed + (ConnectionState.CLOSED, ConnectionInputs.SEND_GOAWAY): + (None, ConnectionState.CLOSED), + (ConnectionState.CLOSED, ConnectionInputs.RECV_GOAWAY): + (None, ConnectionState.CLOSED), + } + + def __init__(self): + self.state = ConnectionState.IDLE + + def process_input(self, input_): + """ + Process a specific input in the state machine. + """ + if not isinstance(input_, ConnectionInputs): + raise ValueError("Input must be an instance of ConnectionInputs") + + try: + func, target_state = self._transitions[(self.state, input_)] + except KeyError: + old_state = self.state + self.state = ConnectionState.CLOSED + raise ProtocolError( + "Invalid input %s in state %s" % (input_, old_state) + ) + else: + self.state = target_state + if func is not None: # pragma: no cover + return func() + + return [] + + +class H2Connection(object): + """ + A low-level HTTP/2 connection object. This handles building and receiving + frames and maintains both connection and per-stream state for all streams + on this connection. + + This wraps a HTTP/2 Connection state machine implementation, ensuring that + frames can only be sent/received when the connection is in a valid state. + It also builds stream state machines on demand to ensure that the + constraints of those state machines are met as well. Attempts to create + frames that cannot be sent will raise a ``ProtocolError``. + + .. versionchanged:: 2.3.0 + Added the ``header_encoding`` keyword argument. + + .. versionchanged:: 2.5.0 + Added the ``config`` keyword argument. 
Deprecated the ``client_side`` + and ``header_encoding`` parameters. + + .. versionchanged:: 3.0.0 + Removed deprecated parameters and properties. + + :param config: The configuration for the HTTP/2 connection. + + .. versionadded:: 2.5.0 + + :type config: :class:`H2Configuration ` + """ + # The initial maximum outbound frame size. This can be changed by receiving + # a settings frame. + DEFAULT_MAX_OUTBOUND_FRAME_SIZE = 65535 + + # The initial maximum inbound frame size. This is somewhat arbitrarily + # chosen. + DEFAULT_MAX_INBOUND_FRAME_SIZE = 2**24 + + # The highest acceptable stream ID. + HIGHEST_ALLOWED_STREAM_ID = 2**31 - 1 + + # The largest acceptable window increment. + MAX_WINDOW_INCREMENT = 2**31 - 1 + + # The initial default value of SETTINGS_MAX_HEADER_LIST_SIZE. + DEFAULT_MAX_HEADER_LIST_SIZE = 2**16 + + # Keep in memory limited amount of results for streams closes + MAX_CLOSED_STREAMS = 2**16 + + def __init__(self, config=None): + self.state_machine = H2ConnectionStateMachine() + self.streams = {} + self.highest_inbound_stream_id = 0 + self.highest_outbound_stream_id = 0 + self.encoder = Encoder() + self.decoder = Decoder() + + # This won't always actually do anything: for versions of HPACK older + # than 2.3.0 it does nothing. However, we have to try! + self.decoder.max_header_list_size = self.DEFAULT_MAX_HEADER_LIST_SIZE + + #: The configuration for this HTTP/2 connection object. + #: + #: .. versionadded:: 2.5.0 + self.config = config + if self.config is None: + self.config = H2Configuration( + client_side=True, + ) + + # Objects that store settings, including defaults. + # + # We set the MAX_CONCURRENT_STREAMS value to 100 because its default is + # unbounded, and that's a dangerous default because it allows + # essentially unbounded resources to be allocated regardless of how + # they will be used. 100 should be suitable for the average + # application. This default obviously does not apply to the remote + # peer's settings: the remote peer controls them! + # + # We also set MAX_HEADER_LIST_SIZE to a reasonable value. This is to + # advertise our defence against CVE-2016-6581. However, not all + # versions of HPACK will let us do it. That's ok: we should at least + # suggest that we're not vulnerable. + self.local_settings = Settings( + client=self.config.client_side, + initial_values={ + SettingCodes.MAX_CONCURRENT_STREAMS: 100, + SettingCodes.MAX_HEADER_LIST_SIZE: + self.DEFAULT_MAX_HEADER_LIST_SIZE, + } + ) + self.remote_settings = Settings(client=not self.config.client_side) + + # The current value of the connection flow control windows on the + # connection. + self.outbound_flow_control_window = ( + self.remote_settings.initial_window_size + ) + + #: The maximum size of a frame that can be emitted by this peer, in + #: bytes. + self.max_outbound_frame_size = self.remote_settings.max_frame_size + + #: The maximum size of a frame that can be received by this peer, in + #: bytes. + self.max_inbound_frame_size = self.local_settings.max_frame_size + + # Buffer for incoming data. + self.incoming_buffer = FrameBuffer(server=not self.config.client_side) + + # A private variable to store a sequence of received header frames + # until completion. + self._header_frames = [] + + # Data that needs to be sent. + self._data_to_send = bytearray() + + # Keeps track of how streams are closed. + # Used to ensure that we don't blow up in the face of frames that were + # in flight when a RST_STREAM was sent. 
+ # Also used to determine whether we should consider a frame received + # while a stream is closed as either a stream error or a connection + # error. + self._closed_streams = SizeLimitDict( + size_limit=self.MAX_CLOSED_STREAMS + ) + + # The flow control window manager for the connection. + self._inbound_flow_control_window_manager = WindowManager( + max_window_size=self.local_settings.initial_window_size + ) + + # When in doubt use dict-dispatch. + self._frame_dispatch_table = { + HeadersFrame: self._receive_headers_frame, + PushPromiseFrame: self._receive_push_promise_frame, + SettingsFrame: self._receive_settings_frame, + DataFrame: self._receive_data_frame, + WindowUpdateFrame: self._receive_window_update_frame, + PingFrame: self._receive_ping_frame, + RstStreamFrame: self._receive_rst_stream_frame, + PriorityFrame: self._receive_priority_frame, + GoAwayFrame: self._receive_goaway_frame, + ContinuationFrame: self._receive_naked_continuation, + AltSvcFrame: self._receive_alt_svc_frame, + ExtensionFrame: self._receive_unknown_frame + } + + def _prepare_for_sending(self, frames): + if not frames: + return + self._data_to_send += b''.join(f.serialize() for f in frames) + assert all(f.body_len <= self.max_outbound_frame_size for f in frames) + + def _open_streams(self, remainder): + """ + A common method of counting number of open streams. Returns the number + of streams that are open *and* that have (stream ID % 2) == remainder. + While it iterates, also deletes any closed streams. + """ + count = 0 + to_delete = [] + + for stream_id, stream in self.streams.items(): + if stream.open and (stream_id % 2 == remainder): + count += 1 + elif stream.closed: + to_delete.append(stream_id) + + for stream_id in to_delete: + stream = self.streams.pop(stream_id) + self._closed_streams[stream_id] = stream.closed_by + + return count + + @property + def open_outbound_streams(self): + """ + The current number of open outbound streams. + """ + outbound_numbers = int(self.config.client_side) + return self._open_streams(outbound_numbers) + + @property + def open_inbound_streams(self): + """ + The current number of open inbound streams. + """ + inbound_numbers = int(not self.config.client_side) + return self._open_streams(inbound_numbers) + + @property + def inbound_flow_control_window(self): + """ + The size of the inbound flow control window for the connection. This is + rarely publicly useful: instead, use :meth:`remote_flow_control_window + `. This + shortcut is largely present to provide a shortcut to this data. + """ + return self._inbound_flow_control_window_manager.current_window_size + + def _begin_new_stream(self, stream_id, allowed_ids): + """ + Initiate a new stream. + + .. versionchanged:: 2.0.0 + Removed this function from the public API. + + :param stream_id: The ID of the stream to open. + :param allowed_ids: What kind of stream ID is allowed. + """ + self.config.logger.debug( + "Attempting to initiate stream ID %d", stream_id + ) + outbound = self._stream_id_is_outbound(stream_id) + highest_stream_id = ( + self.highest_outbound_stream_id if outbound else + self.highest_inbound_stream_id + ) + + if stream_id <= highest_stream_id: + raise StreamIDTooLowError(stream_id, highest_stream_id) + + if (stream_id % 2) != int(allowed_ids): + raise ProtocolError( + "Invalid stream ID for peer." 
+ ) + + s = H2Stream( + stream_id, + config=self.config, + inbound_window_size=self.local_settings.initial_window_size, + outbound_window_size=self.remote_settings.initial_window_size + ) + self.config.logger.debug("Stream ID %d created", stream_id) + s.max_inbound_frame_size = self.max_inbound_frame_size + s.max_outbound_frame_size = self.max_outbound_frame_size + + self.streams[stream_id] = s + self.config.logger.debug("Current streams: %s", self.streams.keys()) + + if outbound: + self.highest_outbound_stream_id = stream_id + else: + self.highest_inbound_stream_id = stream_id + + return s + + def initiate_connection(self): + """ + Provides any data that needs to be sent at the start of the connection. + Must be called for both clients and servers. + """ + self.config.logger.debug("Initializing connection") + self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS) + if self.config.client_side: + preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n' + else: + preamble = b'' + + f = SettingsFrame(0) + for setting, value in self.local_settings.items(): + f.settings[setting] = value + self.config.logger.debug( + "Send Settings frame: %s", self.local_settings + ) + + self._data_to_send += preamble + f.serialize() + + def initiate_upgrade_connection(self, settings_header=None): + """ + Call to initialise the connection object for use with an upgraded + HTTP/2 connection (i.e. a connection negotiated using the + ``Upgrade: h2c`` HTTP header). + + This method differs from :meth:`initiate_connection + ` in several ways. + Firstly, it handles the additional SETTINGS frame that is sent in the + ``HTTP2-Settings`` header field. When called on a client connection, + this method will return a bytestring that the caller can put in the + ``HTTP2-Settings`` field they send on their initial request. When + called on a server connection, the user **must** provide the value they + received from the client in the ``HTTP2-Settings`` header field to the + ``settings_header`` argument, which will be used appropriately. + + Additionally, this method sets up stream 1 in a half-closed state + appropriate for this side of the connection, to reflect the fact that + the request is already complete. + + Finally, this method also prepares the appropriate preamble to be sent + after the upgrade. + + .. versionadded:: 2.3.0 + + :param settings_header: (optional, server-only): The value of the + ``HTTP2-Settings`` header field received from the client. + :type settings_header: ``bytes`` + + :returns: For clients, a bytestring to put in the ``HTTP2-Settings``. + For servers, returns nothing. + :rtype: ``bytes`` or ``None`` + """ + self.config.logger.debug( + "Upgrade connection. Current settings: %s", self.local_settings + ) + + frame_data = None + # Begin by getting the preamble in place. + self.initiate_connection() + + if self.config.client_side: + f = SettingsFrame(0) + for setting, value in self.local_settings.items(): + f.settings[setting] = value + + frame_data = f.serialize_body() + frame_data = base64.urlsafe_b64encode(frame_data) + elif settings_header: + # We have a settings header from the client. This needs to be + # applied, but we want to throw away the ACK. We do this by + # inserting the data into a Settings frame and then passing it to + # the state machine, but ignoring the return value. + settings_header = base64.urlsafe_b64decode(settings_header) + f = SettingsFrame(0) + f.parse_body(settings_header) + self._receive_settings_frame(f) + + # Set up appropriate state. 
Stream 1 in a half-closed state: + # half-closed(local) for clients, half-closed(remote) for servers. + # Additionally, we need to set up the Connection state machine. + connection_input = ( + ConnectionInputs.SEND_HEADERS if self.config.client_side + else ConnectionInputs.RECV_HEADERS + ) + self.config.logger.debug("Process input %s", connection_input) + self.state_machine.process_input(connection_input) + + # Set up stream 1. + self._begin_new_stream(stream_id=1, allowed_ids=AllowedStreamIDs.ODD) + self.streams[1].upgrade(self.config.client_side) + return frame_data + + def _get_or_create_stream(self, stream_id, allowed_ids): + """ + Gets a stream by its stream ID. Will create one if one does not already + exist. Use allowed_ids to circumvent the usual stream ID rules for + clients and servers. + + .. versionchanged:: 2.0.0 + Removed this function from the public API. + """ + try: + return self.streams[stream_id] + except KeyError: + return self._begin_new_stream(stream_id, allowed_ids) + + def _get_stream_by_id(self, stream_id): + """ + Gets a stream by its stream ID. Raises NoSuchStreamError if the stream + ID does not correspond to a known stream and is higher than the current + maximum: raises if it is lower than the current maximum. + + .. versionchanged:: 2.0.0 + Removed this function from the public API. + """ + try: + return self.streams[stream_id] + except KeyError: + outbound = self._stream_id_is_outbound(stream_id) + highest_stream_id = ( + self.highest_outbound_stream_id if outbound else + self.highest_inbound_stream_id + ) + + if stream_id > highest_stream_id: + raise NoSuchStreamError(stream_id) + else: + raise StreamClosedError(stream_id) + + def get_next_available_stream_id(self): + """ + Returns an integer suitable for use as the stream ID for the next + stream created by this endpoint. For server endpoints, this stream ID + will be even. For client endpoints, this stream ID will be odd. If no + stream IDs are available, raises :class:`NoAvailableStreamIDError + `. + + .. warning:: The return value from this function does not change until + the stream ID has actually been used by sending or pushing + headers on that stream. For that reason, it should be + called as close as possible to the actual use of the + stream ID. + + .. versionadded:: 2.0.0 + + :raises: :class:`NoAvailableStreamIDError + ` + :returns: The next free stream ID this peer can use to initiate a + stream. + :rtype: ``int`` + """ + # No streams have been opened yet, so return the lowest allowed stream + # ID. + if not self.highest_outbound_stream_id: + next_stream_id = 1 if self.config.client_side else 2 + else: + next_stream_id = self.highest_outbound_stream_id + 2 + self.config.logger.debug( + "Next available stream ID %d", next_stream_id + ) + if next_stream_id > self.HIGHEST_ALLOWED_STREAM_ID: + raise NoAvailableStreamIDError("Exhausted allowed stream IDs") + + return next_stream_id + + def send_headers(self, stream_id, headers, end_stream=False, + priority_weight=None, priority_depends_on=None, + priority_exclusive=None): + """ + Send headers on a given stream. + + This function can be used to send request or response headers: the kind + that are sent depends on whether this connection has been opened as a + client or server connection, and whether the stream was opened by the + remote peer or not. + + If this is a client connection, calling ``send_headers`` will send the + headers as a request. It will also implicitly open the stream being + used. 
If this is a client connection and ``send_headers`` has *already* + been called, this will send trailers instead. + + If this is a server connection, calling ``send_headers`` will send the + headers as a response. It is a protocol error for a server to open a + stream by sending headers. If this is a server connection and + ``send_headers`` has *already* been called, this will send trailers + instead. + + When acting as a server, you may call ``send_headers`` any number of + times allowed by the following rules, in this order: + + - zero or more times with ``(':status', '1XX')`` (where ``1XX`` is a + placeholder for any 100-level status code). + - once with any other status header. + - zero or one time for trailers. + + That is, you are allowed to send as many informational responses as you + like, followed by one complete response and zero or one HTTP trailer + blocks. + + Clients may send one or two header blocks: one request block, and + optionally one trailer block. + + If it is important to send HPACK "never indexed" header fields (as + defined in `RFC 7451 Section 7.1.3 + `_), the user may + instead provide headers using the HPACK library's :class:`HeaderTuple + ` and :class:`NeverIndexedHeaderTuple + ` objects. + + This method also allows users to prioritize the stream immediately, + by sending priority information on the HEADERS frame directly. To do + this, any one of ``priority_weight``, ``priority_depends_on``, or + ``priority_exclusive`` must be set to a value that is not ``None``. For + more information on the priority fields, see :meth:`prioritize + `. + + .. warning:: In HTTP/2, it is mandatory that all the HTTP/2 special + headers (that is, ones whose header keys begin with ``:``) appear + at the start of the header block, before any normal headers. + + .. versionchanged:: 2.3.0 + Added support for using :class:`HeaderTuple + ` objects to store headers. + + .. versionchanged:: 2.4.0 + Added the ability to provide priority keyword arguments: + ``priority_weight``, ``priority_depends_on``, and + ``priority_exclusive``. + + :param stream_id: The stream ID to send the headers on. If this stream + does not currently exist, it will be created. + :type stream_id: ``int`` + + :param headers: The request/response headers to send. + :type headers: An iterable of two tuples of bytestrings or + :class:`HeaderTuple ` objects. + + :param end_stream: Whether this headers frame should end the stream + immediately (that is, whether no more data will be sent after this + frame). Defaults to ``False``. + :type end_stream: ``bool`` + + :param priority_weight: Sets the priority weight of the stream. See + :meth:`prioritize ` for more + about how this field works. Defaults to ``None``, which means that + no priority information will be sent. + :type priority_weight: ``int`` or ``None`` + + :param priority_depends_on: Sets which stream this one depends on for + priority purposes. See :meth:`prioritize + ` for more about how this + field works. Defaults to ``None``, which means that no priority + information will be sent. + :type priority_depends_on: ``int`` or ``None`` + + :param priority_exclusive: Sets whether this stream exclusively depends + on the stream given in ``priority_depends_on`` for priority + purposes. See :meth:`prioritize + ` for more about how this + field workds. Defaults to ``None``, which means that no priority + information will be sent. 
+ :type priority_depends_on: ``bool`` or ``None`` + + :returns: Nothing + """ + self.config.logger.debug( + "Send headers on stream ID %d", stream_id + ) + + # Check we can open the stream. + if stream_id not in self.streams: + max_open_streams = self.remote_settings.max_concurrent_streams + if (self.open_outbound_streams + 1) > max_open_streams: + raise TooManyStreamsError( + "Max outbound streams is %d, %d open" % + (max_open_streams, self.open_outbound_streams) + ) + + self.state_machine.process_input(ConnectionInputs.SEND_HEADERS) + stream = self._get_or_create_stream( + stream_id, AllowedStreamIDs(self.config.client_side) + ) + frames = stream.send_headers( + headers, self.encoder, end_stream + ) + + # We may need to send priority information. + priority_present = ( + (priority_weight is not None) or + (priority_depends_on is not None) or + (priority_exclusive is not None) + ) + + if priority_present: + if not self.config.client_side: + raise RFC1122Error("Servers SHOULD NOT prioritize streams.") + + headers_frame = frames[0] + headers_frame.flags.add('PRIORITY') + frames[0] = _add_frame_priority( + headers_frame, + priority_weight, + priority_depends_on, + priority_exclusive + ) + + self._prepare_for_sending(frames) + + def send_data(self, stream_id, data, end_stream=False, pad_length=None): + """ + Send data on a given stream. + + This method does no breaking up of data: if the data is larger than the + value returned by :meth:`local_flow_control_window + ` for this stream + then a :class:`FlowControlError ` will + be raised. If the data is larger than :data:`max_outbound_frame_size + ` then a + :class:`FrameTooLargeError ` will be + raised. + + Hyper-h2 does this to avoid buffering the data internally. If the user + has more data to send than hyper-h2 will allow, consider breaking it up + and buffering it externally. + + :param stream_id: The ID of the stream on which to send the data. + :type stream_id: ``int`` + :param data: The data to send on the stream. + :type data: ``bytes`` + :param end_stream: (optional) Whether this is the last data to be sent + on the stream. Defaults to ``False``. + :type end_stream: ``bool`` + :param pad_length: (optional) Length of the padding to apply to the + data frame. Defaults to ``None`` for no use of padding. Note that + a value of ``0`` results in padding of length ``0`` + (with the "padding" flag set on the frame). + + .. versionadded:: 2.6.0 + + :type pad_length: ``int`` + :returns: Nothing + """ + self.config.logger.debug( + "Send data on stream ID %d with len %d", stream_id, len(data) + ) + frame_size = len(data) + if pad_length is not None: + if not isinstance(pad_length, int): + raise TypeError("pad_length must be an int") + if pad_length < 0 or pad_length > 255: + raise ValueError("pad_length must be within range: [0, 255]") + # Account for padding bytes plus the 1-byte padding length field. + frame_size += pad_length + 1 + self.config.logger.debug( + "Frame size on stream ID %d is %d", stream_id, frame_size + ) + + if frame_size > self.local_flow_control_window(stream_id): + raise FlowControlError( + "Cannot send %d bytes, flow control window is %d." 
% + (frame_size, self.local_flow_control_window(stream_id)) + ) + elif frame_size > self.max_outbound_frame_size: + raise FrameTooLargeError( + "Cannot send frame size %d, max frame size is %d" % + (frame_size, self.max_outbound_frame_size) + ) + + self.state_machine.process_input(ConnectionInputs.SEND_DATA) + frames = self.streams[stream_id].send_data( + data, end_stream, pad_length=pad_length + ) + + self._prepare_for_sending(frames) + + self.outbound_flow_control_window -= frame_size + self.config.logger.debug( + "Outbound flow control window size is %d", + self.outbound_flow_control_window + ) + assert self.outbound_flow_control_window >= 0 + + def end_stream(self, stream_id): + """ + Cleanly end a given stream. + + This method ends a stream by sending an empty DATA frame on that stream + with the ``END_STREAM`` flag set. + + :param stream_id: The ID of the stream to end. + :type stream_id: ``int`` + :returns: Nothing + """ + self.config.logger.debug("End stream ID %d", stream_id) + self.state_machine.process_input(ConnectionInputs.SEND_DATA) + frames = self.streams[stream_id].end_stream() + self._prepare_for_sending(frames) + + def increment_flow_control_window(self, increment, stream_id=None): + """ + Increment a flow control window, optionally for a single stream. Allows + the remote peer to send more data. + + .. versionchanged:: 2.0.0 + Rejects attempts to increment the flow control window by out of + range values with a ``ValueError``. + + :param increment: The amount to increment the flow control window by. + :type increment: ``int`` + :param stream_id: (optional) The ID of the stream that should have its + flow control window opened. If not present or ``None``, the + connection flow control window will be opened instead. + :type stream_id: ``int`` or ``None`` + :returns: Nothing + :raises: ``ValueError`` + """ + if not (1 <= increment <= self.MAX_WINDOW_INCREMENT): + raise ValueError( + "Flow control increment must be between 1 and %d" % + self.MAX_WINDOW_INCREMENT + ) + + self.state_machine.process_input(ConnectionInputs.SEND_WINDOW_UPDATE) + + if stream_id is not None: + stream = self.streams[stream_id] + frames = stream.increase_flow_control_window( + increment + ) + + self.config.logger.debug( + "Increase stream ID %d flow control window by %d", + stream_id, increment + ) + else: + self._inbound_flow_control_window_manager.window_opened(increment) + f = WindowUpdateFrame(0) + f.window_increment = increment + frames = [f] + + self.config.logger.debug( + "Increase connection flow control window by %d", increment + ) + + self._prepare_for_sending(frames) + + def push_stream(self, stream_id, promised_stream_id, request_headers): + """ + Push a response to the client by sending a PUSH_PROMISE frame. + + If it is important to send HPACK "never indexed" header fields (as + defined in `RFC 7451 Section 7.1.3 + `_), the user may + instead provide headers using the HPACK library's :class:`HeaderTuple + ` and :class:`NeverIndexedHeaderTuple + ` objects. + + :param stream_id: The ID of the stream that this push is a response to. + :type stream_id: ``int`` + :param promised_stream_id: The ID of the stream that the pushed + response will be sent on. + :type promised_stream_id: ``int`` + :param request_headers: The headers of the request that the pushed + response will be responding to. + :type request_headers: An iterable of two tuples of bytestrings or + :class:`HeaderTuple ` objects. 
+ :returns: Nothing + """ + self.config.logger.debug( + "Send Push Promise frame on stream ID %d", stream_id + ) + + if not self.remote_settings.enable_push: + raise ProtocolError("Remote peer has disabled stream push") + + self.state_machine.process_input(ConnectionInputs.SEND_PUSH_PROMISE) + stream = self._get_stream_by_id(stream_id) + + # We need to prevent users pushing streams in response to streams that + # they themselves have already pushed: see #163 and RFC 7540 § 6.6. The + # easiest way to do that is to assert that the stream_id is not even: + # this shortcut works because only servers can push and the state + # machine will enforce this. + if (stream_id % 2) == 0: + raise ProtocolError("Cannot recursively push streams.") + + new_stream = self._begin_new_stream( + promised_stream_id, AllowedStreamIDs.EVEN + ) + self.streams[promised_stream_id] = new_stream + + frames = stream.push_stream_in_band( + promised_stream_id, request_headers, self.encoder + ) + new_frames = new_stream.locally_pushed() + self._prepare_for_sending(frames + new_frames) + + def ping(self, opaque_data): + """ + Send a PING frame. + + :param opaque_data: A bytestring of length 8 that will be sent in the + PING frame. + :returns: Nothing + """ + self.config.logger.debug("Send Ping frame") + + if not isinstance(opaque_data, bytes) or len(opaque_data) != 8: + raise ValueError("Invalid value for ping data: %r" % opaque_data) + + self.state_machine.process_input(ConnectionInputs.SEND_PING) + f = PingFrame(0) + f.opaque_data = opaque_data + self._prepare_for_sending([f]) + + def reset_stream(self, stream_id, error_code=0): + """ + Reset a stream. + + This method forcibly closes a stream by sending a RST_STREAM frame for + a given stream. This is not a graceful closure. To gracefully end a + stream, try the :meth:`end_stream + ` method. + + :param stream_id: The ID of the stream to reset. + :type stream_id: ``int`` + :param error_code: (optional) The error code to use to reset the + stream. Defaults to :data:`ErrorCodes.NO_ERROR + `. + :type error_code: ``int`` + :returns: Nothing + """ + self.config.logger.debug("Reset stream ID %d", stream_id) + self.state_machine.process_input(ConnectionInputs.SEND_RST_STREAM) + stream = self._get_stream_by_id(stream_id) + frames = stream.reset_stream(error_code) + self._prepare_for_sending(frames) + + def close_connection(self, error_code=0, additional_data=None, + last_stream_id=None): + + """ + Close a connection, emitting a GOAWAY frame. + + .. versionchanged:: 2.4.0 + Added ``additional_data`` and ``last_stream_id`` arguments. + + :param error_code: (optional) The error code to send in the GOAWAY + frame. + :param additional_data: (optional) Additional debug data indicating + a reason for closing the connection. Must be a bytestring. + :param last_stream_id: (optional) The last stream which was processed + by the sender. Defaults to ``highest_inbound_stream_id``. + :returns: Nothing + """ + self.config.logger.debug("Close connection") + self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY) + + # Additional_data must be bytes + if additional_data is not None: + assert isinstance(additional_data, bytes) + + if last_stream_id is None: + last_stream_id = self.highest_inbound_stream_id + + f = GoAwayFrame( + stream_id=0, + last_stream_id=last_stream_id, + error_code=error_code, + additional_data=(additional_data or b'') + ) + self._prepare_for_sending([f]) + + def update_settings(self, new_settings): + """ + Update the local settings. 
This will prepare and emit the appropriate + SETTINGS frame. + + :param new_settings: A dictionary of {setting: new value} + """ + self.config.logger.debug( + "Update connection settings to %s", new_settings + ) + self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS) + self.local_settings.update(new_settings) + s = SettingsFrame(0) + s.settings = new_settings + self._prepare_for_sending([s]) + + def advertise_alternative_service(self, + field_value, + origin=None, + stream_id=None): + """ + Notify a client about an available Alternative Service. + + An Alternative Service is defined in `RFC 7838 + `_. An Alternative Service + notification informs a client that a given origin is also available + elsewhere. + + Alternative Services can be advertised in two ways. Firstly, they can + be advertised explicitly: that is, a server can say "origin X is also + available at Y". To advertise like this, set the ``origin`` argument + and not the ``stream_id`` argument. Alternatively, they can be + advertised implicitly: that is, a server can say "the origin you're + contacting on stream X is also available at Y". To advertise like this, + set the ``stream_id`` argument and not the ``origin`` argument. + + The explicit method of advertising can be done as long as the + connection is active. The implicit method can only be done after the + client has sent the request headers and before the server has sent the + response headers: outside of those points, Hyper-h2 will forbid sending + the Alternative Service advertisement by raising a ProtocolError. + + The ``field_value`` parameter is specified in RFC 7838. Hyper-h2 does + not validate or introspect this argument: the user is required to + ensure that it's well-formed. ``field_value`` corresponds to RFC 7838's + "Alternative Service Field Value". + + .. note:: It is strongly preferred to use the explicit method of + advertising Alternative Services. The implicit method of + advertising Alternative Services has a number of subtleties + and can lead to inconsistencies between the server and + client. Hyper-h2 allows both mechanisms, but caution is + strongly advised. + + .. versionadded:: 2.3.0 + + :param field_value: The RFC 7838 Alternative Service Field Value. This + argument is not introspected by Hyper-h2: the user is responsible + for ensuring that it is well-formed. + :type field_value: ``bytes`` + + :param origin: The origin/authority to which the Alternative Service + being advertised applies. Must not be provided at the same time as + ``stream_id``. + :type origin: ``bytes`` or ``None`` + + :param stream_id: The ID of the stream which was sent to the authority + for which this Alternative Service advertisement applies. Must not + be provided at the same time as ``origin``. + :type stream_id: ``int`` or ``None`` + + :returns: Nothing. + """ + if not isinstance(field_value, bytes): + raise ValueError("Field must be bytestring.") + + if origin is not None and stream_id is not None: + raise ValueError("Must not provide both origin and stream_id") + + self.state_machine.process_input( + ConnectionInputs.SEND_ALTERNATIVE_SERVICE + ) + + if origin is not None: + # This ALTSVC is sent on stream zero. 
+ f = AltSvcFrame(stream_id=0) + f.origin = origin + f.field = field_value + frames = [f] + else: + stream = self._get_stream_by_id(stream_id) + frames = stream.advertise_alternative_service(field_value) + + self._prepare_for_sending(frames) + + def prioritize(self, stream_id, weight=None, depends_on=None, + exclusive=None): + """ + Notify a server about the priority of a stream. + + Stream priorities are a form of guidance to a remote server: they + inform the server about how important a given response is, so that the + server may allocate its resources (e.g. bandwidth, CPU time, etc.) + accordingly. This exists to allow clients to ensure that the most + important data arrives earlier, while less important data does not + starve out the more important data. + + Stream priorities are explained in depth in `RFC 7540 Section 5.3 + `_. + + This method updates the priority information of a single stream. It may + be called well before a stream is actively in use, or well after a + stream is closed. + + .. warning:: RFC 7540 allows for servers to change the priority of + streams. However, hyper-h2 **does not** allow server + stacks to do this. This is because most clients do not + adequately know how to respond when provided conflicting + priority information, and relatively little utility is + provided by making that functionality available. + + .. note:: hyper-h2 **does not** maintain any information about the + RFC 7540 priority tree. That means that hyper-h2 does not + prevent incautious users from creating invalid priority + trees, particularly by creating priority loops. While some + basic error checking is provided by hyper-h2, users are + strongly recommended to understand their prioritisation + strategies before using the priority tools here. + + .. note:: Priority information is strictly advisory. Servers are + allowed to disregard it entirely. Avoid relying on the idea + that your priority signaling will definitely be obeyed. + + .. versionadded:: 2.4.0 + + :param stream_id: The ID of the stream to prioritize. + :type stream_id: ``int`` + + :param weight: The weight to give the stream. Defaults to ``16``, the + default weight of any stream. May be any value between ``1`` and + ``256`` inclusive. The relative weight of a stream indicates what + proportion of available resources will be allocated to that + stream. + :type weight: ``int`` + + :param depends_on: The ID of the stream on which this stream depends. + This stream will only be progressed if it is impossible to + progress the parent stream (the one on which this one depends). + Passing the value ``0`` means that this stream does not depend on + any other. Defaults to ``0``. + :type depends_on: ``int`` + + :param exclusive: Whether this stream is an exclusive dependency of its + "parent" stream (i.e. the stream given by ``depends_on``). If a + stream is an exclusive dependency of another, that means that all + previously-set children of the parent are moved to become children + of the new exclusively-dependent stream. Defaults to ``False``. + :type exclusive: ``bool`` + """ + if not self.config.client_side: + raise RFC1122Error("Servers SHOULD NOT prioritize streams.") + + self.state_machine.process_input( + ConnectionInputs.SEND_PRIORITY + ) + + frame = PriorityFrame(stream_id) + frame = _add_frame_priority(frame, weight, depends_on, exclusive) + + self._prepare_for_sending([frame]) + + def local_flow_control_window(self, stream_id): + """ + Returns the maximum amount of data that can be sent on stream + ``stream_id``. 
+ + This value will never be larger than the total data that can be sent on + the connection: even if the given stream allows more data, the + connection window provides a logical maximum to the amount of data that + can be sent. + + The maximum data that can be sent in a single data frame on a stream + is either this value, or the maximum frame size, whichever is + *smaller*. + + :param stream_id: The ID of the stream whose flow control window is + being queried. + :type stream_id: ``int`` + :returns: The amount of data in bytes that can be sent on the stream + before the flow control window is exhausted. + :rtype: ``int`` + """ + stream = self._get_stream_by_id(stream_id) + return min( + self.outbound_flow_control_window, + stream.outbound_flow_control_window + ) + + def remote_flow_control_window(self, stream_id): + """ + Returns the maximum amount of data the remote peer can send on stream + ``stream_id``. + + This value will never be larger than the total data that can be sent on + the connection: even if the given stream allows more data, the + connection window provides a logical maximum to the amount of data that + can be sent. + + The maximum data that can be sent in a single data frame on a stream + is either this value, or the maximum frame size, whichever is + *smaller*. + + :param stream_id: The ID of the stream whose flow control window is + being queried. + :type stream_id: ``int`` + :returns: The amount of data in bytes that can be received on the + stream before the flow control window is exhausted. + :rtype: ``int`` + """ + stream = self._get_stream_by_id(stream_id) + return min( + self.inbound_flow_control_window, + stream.inbound_flow_control_window + ) + + def acknowledge_received_data(self, acknowledged_size, stream_id): + """ + Inform the :class:`H2Connection ` that a + certain number of flow-controlled bytes have been processed, and that + the space should be handed back to the remote peer at an opportune + time. + + .. versionadded:: 2.5.0 + + :param acknowledged_size: The total *flow-controlled size* of the data + that has been processed. Note that this must include the amount of + padding that was sent with that data. + :type acknowledged_size: ``int`` + :param stream_id: The ID of the stream on which this data was received. + :type stream_id: ``int`` + :returns: Nothing + :rtype: ``None`` + """ + self.config.logger.debug( + "Ack received data on stream ID %d with size %d", + stream_id, acknowledged_size + ) + if stream_id <= 0: + raise ValueError( + "Stream ID %d is not valid for acknowledge_received_data" % + stream_id + ) + if acknowledged_size < 0: + raise ValueError("Cannot acknowledge negative data") + + frames = [] + + conn_manager = self._inbound_flow_control_window_manager + conn_increment = conn_manager.process_bytes(acknowledged_size) + if conn_increment: + f = WindowUpdateFrame(0) + f.window_increment = conn_increment + frames.append(f) + + try: + stream = self._get_stream_by_id(stream_id) + except StreamClosedError: + # The stream is already gone. We're not worried about incrementing + # the window in this case. + pass + else: + # No point incrementing the windows of closed streams. + if stream.open: + frames.extend( + stream.acknowledge_received_data(acknowledged_size) + ) + + self._prepare_for_sending(frames) + + def data_to_send(self, amount=None): + """ + Returns some data for sending out of the internal data buffer. + + This method is analogous to ``read`` on a file-like object, but it + doesn't block. 
Instead, it returns as much data as the user asks for, + or less if that much data is not available. It does not perform any + I/O, and so uses a different name. + + :param amount: (optional) The maximum amount of data to return. If not + set, or set to ``None``, will return as much data as possible. + :type amount: ``int`` + :returns: A bytestring containing the data to send on the wire. + :rtype: ``bytes`` + """ + if amount is None: + data = bytes(self._data_to_send) + self._data_to_send = bytearray() + return data + else: + data = bytes(self._data_to_send[:amount]) + self._data_to_send = self._data_to_send[amount:] + return data + + def clear_outbound_data_buffer(self): + """ + Clears the outbound data buffer, such that if this call was immediately + followed by a call to + :meth:`data_to_send `, that + call would return no data. + + This method should not normally be used, but is made available to avoid + exposing implementation details. + """ + self._data_to_send = bytearray() + + def _acknowledge_settings(self): + """ + Acknowledge settings that have been received. + + .. versionchanged:: 2.0.0 + Removed from public API, removed useless ``event`` parameter, made + automatic. + + :returns: Nothing + """ + self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS) + + changes = self.remote_settings.acknowledge() + + if SettingCodes.INITIAL_WINDOW_SIZE in changes: + setting = changes[SettingCodes.INITIAL_WINDOW_SIZE] + self._flow_control_change_from_settings( + setting.original_value, + setting.new_value, + ) + + # HEADER_TABLE_SIZE changes by the remote part affect our encoder: cf. + # RFC 7540 Section 6.5.2. + if SettingCodes.HEADER_TABLE_SIZE in changes: + setting = changes[SettingCodes.HEADER_TABLE_SIZE] + self.encoder.header_table_size = setting.new_value + + if SettingCodes.MAX_FRAME_SIZE in changes: + setting = changes[SettingCodes.MAX_FRAME_SIZE] + self.max_outbound_frame_size = setting.new_value + for stream in self.streams.values(): + stream.max_outbound_frame_size = setting.new_value + + f = SettingsFrame(0) + f.flags.add('ACK') + return [f] + + def _flow_control_change_from_settings(self, old_value, new_value): + """ + Update flow control windows in response to a change in the value of + SETTINGS_INITIAL_WINDOW_SIZE. + + When this setting is changed, it automatically updates all flow control + windows by the delta in the settings values. Note that it does not + increment the *connection* flow control window, per section 6.9.2 of + RFC 7540. + """ + delta = new_value - old_value + + for stream in self.streams.values(): + stream.outbound_flow_control_window = guard_increment_window( + stream.outbound_flow_control_window, + delta + ) + + def _inbound_flow_control_change_from_settings(self, old_value, new_value): + """ + Update remote flow control windows in response to a change in the value + of SETTINGS_INITIAL_WINDOW_SIZE. + + When this setting is changed, it automatically updates all remote flow + control windows by the delta in the settings values. + """ + delta = new_value - old_value + + for stream in self.streams.values(): + stream._inbound_flow_control_change_from_settings(delta) + + def receive_data(self, data): + """ + Pass some received HTTP/2 data to the connection for handling. + + :param data: The data received from the remote peer on the network. + :type data: ``bytes`` + :returns: A list of events that the remote peer triggered by sending + this data. + """ + self.config.logger.debug( + "Process received data on connection. 
Received data: %r", data + ) + + events = [] + self.incoming_buffer.add_data(data) + self.incoming_buffer.max_frame_size = self.max_inbound_frame_size + + try: + for frame in self.incoming_buffer: + events.extend(self._receive_frame(frame)) + except InvalidPaddingError: + self._terminate_connection(ErrorCodes.PROTOCOL_ERROR) + raise ProtocolError("Received frame with invalid padding.") + except ProtocolError as e: + # For whatever reason, receiving the frame caused a protocol error. + # We should prepare to emit a GoAway frame before throwing the + # exception up further. No need for an event: the exception will + # do fine. + self._terminate_connection(e.error_code) + raise + + return events + + def _receive_frame(self, frame): + """ + Handle a frame received on the connection. + + .. versionchanged:: 2.0.0 + Removed from the public API. + """ + try: + # I don't love using __class__ here, maybe reconsider it. + frames, events = self._frame_dispatch_table[frame.__class__](frame) + except StreamClosedError as e: + # If the stream was closed by RST_STREAM, we just send a RST_STREAM + # to the remote peer. Otherwise, this is a connection error, and so + # we will re-raise to trigger one. + if self._stream_is_closed_by_reset(e.stream_id): + f = RstStreamFrame(e.stream_id) + f.error_code = e.error_code + self._prepare_for_sending([f]) + events = e._events + else: + raise + except StreamIDTooLowError as e: + # The stream ID seems invalid. This may happen when the closed + # stream has been cleaned up, or when the remote peer has opened a + # new stream with a higher stream ID than this one, forcing it + # closed implicitly. + # + # Check how the stream was closed: depending on the mechanism, it + # is either a stream error or a connection error. + if self._stream_is_closed_by_reset(e.stream_id): + # Closed by RST_STREAM is a stream error. + f = RstStreamFrame(e.stream_id) + f.error_code = ErrorCodes.STREAM_CLOSED + self._prepare_for_sending([f]) + events = [] + elif self._stream_is_closed_by_end(e.stream_id): + # Closed by END_STREAM is a connection error. + raise StreamClosedError(e.stream_id) + else: + # Closed implicitly, also a connection error, but of type + # PROTOCOL_ERROR. + raise + else: + self._prepare_for_sending(frames) + + return events + + def _terminate_connection(self, error_code): + """ + Terminate the connection early. Used in error handling blocks to send + GOAWAY frames. + """ + f = GoAwayFrame(0) + f.last_stream_id = self.highest_inbound_stream_id + f.error_code = error_code + self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY) + self._prepare_for_sending([f]) + + def _receive_headers_frame(self, frame): + """ + Receive a headers frame on the connection. + """ + # If necessary, check we can open the stream. Also validate that the + # stream ID is valid. + if frame.stream_id not in self.streams: + max_open_streams = self.local_settings.max_concurrent_streams + if (self.open_inbound_streams + 1) > max_open_streams: + raise TooManyStreamsError( + "Max outbound streams is %d, %d open" % + (max_open_streams, self.open_outbound_streams) + ) + + # Let's decode the headers. We handle headers as bytes internally up + # until we hang them off the event, at which point we may optionally + # convert them to unicode. 
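+ # HPACK state is shared across the entire connection, so the header
+ # block is decoded up front, before the state machine runs: skipping
+ # the decode here would desynchronise our decoder from the peer's
+ # encoder.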
+ headers = _decode_headers(self.decoder, frame.data) + + events = self.state_machine.process_input( + ConnectionInputs.RECV_HEADERS + ) + stream = self._get_or_create_stream( + frame.stream_id, AllowedStreamIDs(not self.config.client_side) + ) + frames, stream_events = stream.receive_headers( + headers, + 'END_STREAM' in frame.flags, + self.config.header_encoding + ) + + if 'PRIORITY' in frame.flags: + p_frames, p_events = self._receive_priority_frame(frame) + stream_events[0].priority_updated = p_events[0] + stream_events.extend(p_events) + assert not p_frames + + return frames, events + stream_events + + def _receive_push_promise_frame(self, frame): + """ + Receive a push-promise frame on the connection. + """ + if not self.local_settings.enable_push: + raise ProtocolError("Received pushed stream") + + pushed_headers = _decode_headers(self.decoder, frame.data) + + events = self.state_machine.process_input( + ConnectionInputs.RECV_PUSH_PROMISE + ) + + try: + stream = self._get_stream_by_id(frame.stream_id) + except NoSuchStreamError: + # We need to check if the parent stream was reset by us. If it was + # then we presume that the PUSH_PROMISE was in flight when we reset + # the parent stream. Rather than accept the new stream, just reset + # it. + # + # If this was closed naturally, however, we should call this a + # PROTOCOL_ERROR: pushing a stream on a naturally closed stream is + # a real problem because it creates a brand new stream that the + # remote peer now believes exists. + if (self._stream_closed_by(frame.stream_id) == + StreamClosedBy.SEND_RST_STREAM): + f = RstStreamFrame(frame.promised_stream_id) + f.error_code = ErrorCodes.REFUSED_STREAM + return [f], events + + raise ProtocolError("Attempted to push on closed stream.") + + # We need to prevent peers pushing streams in response to streams that + # they themselves have already pushed: see #163 and RFC 7540 § 6.6. The + # easiest way to do that is to assert that the stream_id is not even: + # this shortcut works because only servers can push and the state + # machine will enforce this. + if (frame.stream_id % 2) == 0: + raise ProtocolError("Cannot recursively push streams.") + + try: + frames, stream_events = stream.receive_push_promise_in_band( + frame.promised_stream_id, + pushed_headers, + self.config.header_encoding, + ) + except StreamClosedError: + # The parent stream was reset by us, so we presume that + # PUSH_PROMISE was in flight when we reset the parent stream. + # So we just reset the new stream. + f = RstStreamFrame(frame.promised_stream_id) + f.error_code = ErrorCodes.REFUSED_STREAM + return [f], events + + new_stream = self._begin_new_stream( + frame.promised_stream_id, AllowedStreamIDs.EVEN + ) + self.streams[frame.promised_stream_id] = new_stream + new_stream.remotely_pushed(pushed_headers) + + return frames, events + stream_events + + def _receive_data_frame(self, frame): + """ + Receive a data frame on the connection. + """ + flow_controlled_length = frame.flow_controlled_length + + events = self.state_machine.process_input( + ConnectionInputs.RECV_DATA + ) + self._inbound_flow_control_window_manager.window_consumed( + flow_controlled_length + ) + stream = self._get_stream_by_id(frame.stream_id) + frames, stream_events = stream.receive_data( + frame.data, + 'END_STREAM' in frame.flags, + flow_controlled_length + ) + return frames, events + stream_events + + def _receive_settings_frame(self, frame): + """ + Receive a SETTINGS frame on the connection. 
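+
+ An ACK of our own settings is handed off to ``_local_settings_acked``;
+ any other SETTINGS frame updates the remote settings and queues an
+ automatic acknowledgement.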
+ """ + events = self.state_machine.process_input( + ConnectionInputs.RECV_SETTINGS + ) + + # This is an ack of the local settings. + if 'ACK' in frame.flags: + changed_settings = self._local_settings_acked() + ack_event = SettingsAcknowledged() + ack_event.changed_settings = changed_settings + events.append(ack_event) + return [], events + + # Add the new settings. + self.remote_settings.update(frame.settings) + events.append( + RemoteSettingsChanged.from_settings( + self.remote_settings, frame.settings + ) + ) + frames = self._acknowledge_settings() + + return frames, events + + def _receive_window_update_frame(self, frame): + """ + Receive a WINDOW_UPDATE frame on the connection. + """ + # Validate the frame. + if not (1 <= frame.window_increment <= self.MAX_WINDOW_INCREMENT): + raise ProtocolError( + "Flow control increment must be between 1 and %d, received %d" + % (self.MAX_WINDOW_INCREMENT, frame.window_increment) + ) + + events = self.state_machine.process_input( + ConnectionInputs.RECV_WINDOW_UPDATE + ) + + if frame.stream_id: + stream = self._get_stream_by_id(frame.stream_id) + frames, stream_events = stream.receive_window_update( + frame.window_increment + ) + else: + # Increment our local flow control window. + self.outbound_flow_control_window = guard_increment_window( + self.outbound_flow_control_window, + frame.window_increment + ) + + # FIXME: Should we split this into one event per active stream? + window_updated_event = WindowUpdated() + window_updated_event.stream_id = 0 + window_updated_event.delta = frame.window_increment + stream_events = [window_updated_event] + frames = [] + + return frames, events + stream_events + + def _receive_ping_frame(self, frame): + """ + Receive a PING frame on the connection. + """ + events = self.state_machine.process_input( + ConnectionInputs.RECV_PING + ) + flags = [] + + if 'ACK' in frame.flags: + evt = PingAckReceived() + else: + evt = PingReceived() + + # automatically ACK the PING with the same 'opaque data' + f = PingFrame(0) + f.flags = {'ACK'} + f.opaque_data = frame.opaque_data + flags.append(f) + + evt.ping_data = frame.opaque_data + events.append(evt) + + return flags, events + + def _receive_rst_stream_frame(self, frame): + """ + Receive a RST_STREAM frame on the connection. + """ + events = self.state_machine.process_input( + ConnectionInputs.RECV_RST_STREAM + ) + try: + stream = self._get_stream_by_id(frame.stream_id) + except NoSuchStreamError: + # The stream is missing. That's ok, we just do nothing here. + stream_frames = [] + stream_events = [] + else: + stream_frames, stream_events = stream.stream_reset(frame) + + return stream_frames, events + stream_events + + def _receive_priority_frame(self, frame): + """ + Receive a PRIORITY frame on the connection. + """ + events = self.state_machine.process_input( + ConnectionInputs.RECV_PRIORITY + ) + + event = PriorityUpdated() + event.stream_id = frame.stream_id + event.depends_on = frame.depends_on + event.exclusive = frame.exclusive + + # Weight is an integer between 1 and 256, but the byte only allows + # 0 to 255: add one. + event.weight = frame.stream_weight + 1 + + # A stream may not depend on itself. + if event.depends_on == frame.stream_id: + raise ProtocolError( + "Stream %d may not depend on itself" % frame.stream_id + ) + events.append(event) + + return [], events + + def _receive_goaway_frame(self, frame): + """ + Receive a GOAWAY frame on the connection. 
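+
+ The outbound data buffer is cleared, since no further data can be sent
+ on this connection, and a ``ConnectionTerminated`` event is returned to
+ the caller.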
+ """ + events = self.state_machine.process_input( + ConnectionInputs.RECV_GOAWAY + ) + + # Clear the outbound data buffer: we cannot send further data now. + self.clear_outbound_data_buffer() + + # Fire an appropriate ConnectionTerminated event. + new_event = ConnectionTerminated() + new_event.error_code = _error_code_from_int(frame.error_code) + new_event.last_stream_id = frame.last_stream_id + new_event.additional_data = (frame.additional_data + if frame.additional_data else None) + events.append(new_event) + + return [], events + + def _receive_naked_continuation(self, frame): + """ + A naked CONTINUATION frame has been received. This is always an error, + but the type of error it is depends on the state of the stream and must + transition the state of the stream, so we need to pass it to the + appropriate stream. + """ + stream = self._get_stream_by_id(frame.stream_id) + stream.receive_continuation() + assert False, "Should not be reachable" + + def _receive_alt_svc_frame(self, frame): + """ + An ALTSVC frame has been received. This frame, specified in RFC 7838, + is used to advertise alternative places where the same service can be + reached. + + This frame can optionally be received either on a stream or on stream + 0, and its semantics are different in each case. + """ + events = self.state_machine.process_input( + ConnectionInputs.RECV_ALTERNATIVE_SERVICE + ) + frames = [] + + if frame.stream_id: + # Given that it makes no sense to receive ALTSVC on a stream + # before that stream has been opened with a HEADERS frame, the + # ALTSVC frame cannot create a stream. If the stream is not + # present, we simply ignore the frame. + try: + stream = self._get_stream_by_id(frame.stream_id) + except (NoSuchStreamError, StreamClosedError): + pass + else: + stream_frames, stream_events = stream.receive_alt_svc(frame) + frames.extend(stream_frames) + events.extend(stream_events) + else: + # This frame is sent on stream 0. The origin field on the frame + # must be present, though if it isn't it's not a ProtocolError + # (annoyingly), we just need to ignore it. + if not frame.origin: + return frames, events + + # If we're a server, we want to ignore this (RFC 7838 says so). + if not self.config.client_side: + return frames, events + + event = AlternativeServiceAvailable() + event.origin = frame.origin + event.field_value = frame.field + events.append(event) + + return frames, events + + def _receive_unknown_frame(self, frame): + """ + We have received a frame that we do not understand. This is almost + certainly an extension frame, though it's impossible to be entirely + sure. + + RFC 7540 § 5.5 says that we MUST ignore unknown frame types: so we + do. We do notify the user that we received one, however. + """ + # All we do here is log. + self.config.logger.debug( + "Received unknown extension frame (ID %d)", frame.stream_id + ) + event = UnknownFrameReceived() + event.frame = frame + return [], [event] + + def _local_settings_acked(self): + """ + Handle the local settings being ACKed, update internal state. 
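+
+ :returns: A dict of ``{setting: ChangedSetting}`` describing the
+ settings that took effect, as produced by ``Settings.acknowledge``.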
+ """ + changes = self.local_settings.acknowledge() + + if SettingCodes.INITIAL_WINDOW_SIZE in changes: + setting = changes[SettingCodes.INITIAL_WINDOW_SIZE] + self._inbound_flow_control_change_from_settings( + setting.original_value, + setting.new_value, + ) + + if SettingCodes.MAX_HEADER_LIST_SIZE in changes: + setting = changes[SettingCodes.MAX_HEADER_LIST_SIZE] + self.decoder.max_header_list_size = setting.new_value + + if SettingCodes.MAX_FRAME_SIZE in changes: + setting = changes[SettingCodes.MAX_FRAME_SIZE] + self.max_inbound_frame_size = setting.new_value + + if SettingCodes.HEADER_TABLE_SIZE in changes: + setting = changes[SettingCodes.HEADER_TABLE_SIZE] + # This is safe across all hpack versions: some versions just won't + # respect it. + self.decoder.max_allowed_table_size = setting.new_value + + return changes + + def _stream_id_is_outbound(self, stream_id): + """ + Returns ``True`` if the stream ID corresponds to an outbound stream + (one initiated by this peer), returns ``False`` otherwise. + """ + return (stream_id % 2 == int(self.config.client_side)) + + def _stream_closed_by(self, stream_id): + """ + Returns how the stream was closed. + + The return value will be either a member of + ``h2.stream.StreamClosedBy`` or ``None``. If ``None``, the stream was + closed implicitly by the peer opening a stream with a higher stream ID + before opening this one. + """ + if stream_id in self.streams: + return self.streams[stream_id].closed_by + if stream_id in self._closed_streams: + return self._closed_streams[stream_id] + return None + + def _stream_is_closed_by_reset(self, stream_id): + """ + Returns ``True`` if the stream was closed by sending or receiving a + RST_STREAM frame. Returns ``False`` otherwise. + """ + return self._stream_closed_by(stream_id) in ( + StreamClosedBy.RECV_RST_STREAM, StreamClosedBy.SEND_RST_STREAM + ) + + def _stream_is_closed_by_end(self, stream_id): + """ + Returns ``True`` if the stream was closed by sending or receiving an + END_STREAM flag in a HEADERS or DATA frame. Returns ``False`` + otherwise. + """ + return self._stream_closed_by(stream_id) in ( + StreamClosedBy.RECV_END_STREAM, StreamClosedBy.SEND_END_STREAM + ) + + +def _add_frame_priority(frame, weight=None, depends_on=None, exclusive=None): + """ + Adds priority data to a given frame. Does not change any flags set on that + frame: if the caller is adding priority information to a HEADERS frame they + must set that themselves. + + This method also deliberately sets defaults for anything missing. + + This method validates the input values. + """ + # A stream may not depend on itself. + if depends_on == frame.stream_id: + raise ProtocolError( + "Stream %d may not depend on itself" % frame.stream_id + ) + + # Weight must be between 1 and 256. + if weight is not None: + if weight > 256 or weight < 1: + raise ProtocolError( + "Weight must be between 1 and 256, not %d" % weight + ) + else: + # Weight is an integer between 1 and 256, but the byte only allows + # 0 to 255: subtract one. + weight -= 1 + + # Set defaults for anything not provided. + weight = weight if weight is not None else 15 + depends_on = depends_on if depends_on is not None else 0 + exclusive = exclusive if exclusive is not None else False + + frame.stream_weight = weight + frame.depends_on = depends_on + frame.exclusive = exclusive + + return frame + + +def _decode_headers(decoder, encoded_header_block): + """ + Decode a HPACK-encoded header block, translating HPACK exceptions into + sensible hyper-h2 errors. 
+ + This only ever returns bytestring headers: hyper-h2 may emit them as + unicode later, but internally it processes them as bytestrings only. + """ + try: + return decoder.decode(encoded_header_block, raw=True) + except OversizedHeaderListError as e: + # This is a symptom of a HPACK bomb attack: the user has + # disregarded our requirements on how large a header block we'll + # accept. + raise DenialOfServiceError("Oversized header block: %s" % e) + except (HPACKError, IndexError, TypeError, UnicodeDecodeError) as e: + # We should only need HPACKError here, but versions of HPACK older + # than 2.1.0 throw all three others as well. For maximum + # compatibility, catch all of them. + raise ProtocolError("Error decoding header block: %s" % e) diff --git a/py3/lib/python3.6/site-packages/h2/errors.py b/py3/lib/python3.6/site-packages/h2/errors.py new file mode 100644 index 0000000..baef200 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2/errors.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +""" +h2/errors +~~~~~~~~~~~~~~~~~~~ + +Global error code registry containing the established HTTP/2 error codes. + +The current registry is available at: +https://tools.ietf.org/html/rfc7540#section-11.4 +""" +import enum + + +class ErrorCodes(enum.IntEnum): + """ + All known HTTP/2 error codes. + + .. versionadded:: 2.5.0 + """ + #: Graceful shutdown. + NO_ERROR = 0x0 + + #: Protocol error detected. + PROTOCOL_ERROR = 0x1 + + #: Implementation fault. + INTERNAL_ERROR = 0x2 + + #: Flow-control limits exceeded. + FLOW_CONTROL_ERROR = 0x3 + + #: Settings not acknowledged. + SETTINGS_TIMEOUT = 0x4 + + #: Frame received for closed stream. + STREAM_CLOSED = 0x5 + + #: Frame size incorrect. + FRAME_SIZE_ERROR = 0x6 + + #: Stream not processed. + REFUSED_STREAM = 0x7 + + #: Stream cancelled. + CANCEL = 0x8 + + #: Compression state not updated. + COMPRESSION_ERROR = 0x9 + + #: TCP connection error for CONNECT method. + CONNECT_ERROR = 0xa + + #: Processing capacity exceeded. + ENHANCE_YOUR_CALM = 0xb + + #: Negotiated TLS parameters not acceptable. + INADEQUATE_SECURITY = 0xc + + #: Use HTTP/1.1 for the request. + HTTP_1_1_REQUIRED = 0xd + + +def _error_code_from_int(code): + """ + Given an integer error code, returns either one of :class:`ErrorCodes + ` or, if not present in the known set of codes, + returns the integer directly. + """ + try: + return ErrorCodes(code) + except ValueError: + return code + + +__all__ = ['ErrorCodes'] diff --git a/py3/lib/python3.6/site-packages/h2/events.py b/py3/lib/python3.6/site-packages/h2/events.py new file mode 100644 index 0000000..a06c990 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2/events.py @@ -0,0 +1,648 @@ +# -*- coding: utf-8 -*- +""" +h2/events +~~~~~~~~~ + +Defines Event types for HTTP/2. + +Events are returned by the H2 state machine to allow implementations to keep +track of events triggered by receiving data. Each time data is provided to the +H2 state machine it processes the data and returns a list of Event objects. +""" +import binascii + +from .settings import ChangedSetting, _setting_code_from_int + + +class Event(object): + """ + Base class for h2 events. + """ + pass + + +class RequestReceived(Event): + """ + The RequestReceived event is fired whenever request headers are received. + This event carries the HTTP headers for the given request and the stream ID + of the new stream. + + .. versionchanged:: 2.3.0 + Changed the type of ``headers`` to :class:`HeaderTuple + `. This has no effect on current users. + + .. 
versionchanged:: 2.4.0 + Added ``stream_ended`` and ``priority_updated`` properties. + """ + def __init__(self): + #: The Stream ID for the stream this request was made on. + self.stream_id = None + + #: The request headers. + self.headers = None + + #: If this request also ended the stream, the associated + #: :class:`StreamEnded ` event will be available + #: here. + #: + #: .. versionadded:: 2.4.0 + self.stream_ended = None + + #: If this request also had associated priority information, the + #: associated :class:`PriorityUpdated ` + #: event will be available here. + #: + #: .. versionadded:: 2.4.0 + self.priority_updated = None + + def __repr__(self): + return "" % ( + self.stream_id, self.headers + ) + + +class ResponseReceived(Event): + """ + The ResponseReceived event is fired whenever response headers are received. + This event carries the HTTP headers for the given response and the stream + ID of the new stream. + + .. versionchanged:: 2.3.0 + Changed the type of ``headers`` to :class:`HeaderTuple + `. This has no effect on current users. + + .. versionchanged:: 2.4.0 + Added ``stream_ended`` and ``priority_updated`` properties. + """ + def __init__(self): + #: The Stream ID for the stream this response was made on. + self.stream_id = None + + #: The response headers. + self.headers = None + + #: If this response also ended the stream, the associated + #: :class:`StreamEnded ` event will be available + #: here. + #: + #: .. versionadded:: 2.4.0 + self.stream_ended = None + + #: If this response also had associated priority information, the + #: associated :class:`PriorityUpdated ` + #: event will be available here. + #: + #: .. versionadded:: 2.4.0 + self.priority_updated = None + + def __repr__(self): + return "" % ( + self.stream_id, self.headers + ) + + +class TrailersReceived(Event): + """ + The TrailersReceived event is fired whenever trailers are received on a + stream. Trailers are a set of headers sent after the body of the + request/response, and are used to provide information that wasn't known + ahead of time (e.g. content-length). This event carries the HTTP header + fields that form the trailers and the stream ID of the stream on which they + were received. + + .. versionchanged:: 2.3.0 + Changed the type of ``headers`` to :class:`HeaderTuple + `. This has no effect on current users. + + .. versionchanged:: 2.4.0 + Added ``stream_ended`` and ``priority_updated`` properties. + """ + def __init__(self): + #: The Stream ID for the stream on which these trailers were received. + self.stream_id = None + + #: The trailers themselves. + self.headers = None + + #: Trailers always end streams. This property has the associated + #: :class:`StreamEnded ` in it. + #: + #: .. versionadded:: 2.4.0 + self.stream_ended = None + + #: If the trailers also set associated priority information, the + #: associated :class:`PriorityUpdated ` + #: event will be available here. + #: + #: .. versionadded:: 2.4.0 + self.priority_updated = None + + def __repr__(self): + return "" % ( + self.stream_id, self.headers + ) + + +class _HeadersSent(Event): + """ + The _HeadersSent event is fired whenever headers are sent. + + This is an internal event, used to determine validation steps on + outgoing header blocks. + """ + pass + + +class _ResponseSent(_HeadersSent): + """ + The _ResponseSent event is fired whenever response headers are sent + on a stream. + + This is an internal event, used to determine validation steps on + outgoing header blocks. 
+ """ + pass + + +class _RequestSent(_HeadersSent): + """ + The _RequestSent event is fired whenever request headers are sent + on a stream. + + This is an internal event, used to determine validation steps on + outgoing header blocks. + """ + pass + + +class _TrailersSent(_HeadersSent): + """ + The _TrailersSent event is fired whenever trailers are sent on a + stream. Trailers are a set of headers sent after the body of the + request/response, and are used to provide information that wasn't known + ahead of time (e.g. content-length). + + This is an internal event, used to determine validation steps on + outgoing header blocks. + """ + pass + + +class _PushedRequestSent(_HeadersSent): + """ + The _PushedRequestSent event is fired whenever pushed request headers are + sent. + + This is an internal event, used to determine validation steps on outgoing + header blocks. + """ + pass + + +class InformationalResponseReceived(Event): + """ + The InformationalResponseReceived event is fired when an informational + response (that is, one whose status code is a 1XX code) is received from + the remote peer. + + The remote peer may send any number of these, from zero upwards. These + responses are most commonly sent in response to requests that have the + ``expect: 100-continue`` header field present. Most users can safely + ignore this event unless you are intending to use the + ``expect: 100-continue`` flow, or are for any reason expecting a different + 1XX status code. + + .. versionadded:: 2.2.0 + + .. versionchanged:: 2.3.0 + Changed the type of ``headers`` to :class:`HeaderTuple + `. This has no effect on current users. + + .. versionchanged:: 2.4.0 + Added ``priority_updated`` property. + """ + def __init__(self): + #: The Stream ID for the stream this informational response was made + #: on. + self.stream_id = None + + #: The headers for this informational response. + self.headers = None + + #: If this response also had associated priority information, the + #: associated :class:`PriorityUpdated ` + #: event will be available here. + #: + #: .. versionadded:: 2.4.0 + self.priority_updated = None + + def __repr__(self): + return "" % ( + self.stream_id, self.headers + ) + + +class DataReceived(Event): + """ + The DataReceived event is fired whenever data is received on a stream from + the remote peer. The event carries the data itself, and the stream ID on + which the data was received. + + .. versionchanged:: 2.4.0 + Added ``stream_ended`` property. + """ + def __init__(self): + #: The Stream ID for the stream this data was received on. + self.stream_id = None + + #: The data itself. + self.data = None + + #: The amount of data received that counts against the flow control + #: window. Note that padding counts against the flow control window, so + #: when adjusting flow control you should always use this field rather + #: than ``len(data)``. + self.flow_controlled_length = None + + #: If this data chunk also completed the stream, the associated + #: :class:`StreamEnded ` event will be available + #: here. + #: + #: .. versionadded:: 2.4.0 + self.stream_ended = None + + def __repr__(self): + return ( + "" % ( + self.stream_id, + self.flow_controlled_length, + _bytes_representation(self.data[:20]), + ) + ) + + +class WindowUpdated(Event): + """ + The WindowUpdated event is fired whenever a flow control window changes + size. HTTP/2 defines flow control windows for connections and streams: this + event fires for both connections and streams. 
The event carries the ID of + the stream to which it applies (set to zero if the window update applies to + the connection), and the delta in the window size. + """ + def __init__(self): + #: The Stream ID of the stream whose flow control window was changed. + #: May be ``0`` if the connection window was changed. + self.stream_id = None + + #: The window delta. + self.delta = None + + def __repr__(self): + return "" % ( + self.stream_id, self.delta + ) + + +class RemoteSettingsChanged(Event): + """ + The RemoteSettingsChanged event is fired whenever the remote peer changes + its settings. It contains a complete inventory of changed settings, + including their previous values. + + In HTTP/2, settings changes need to be acknowledged. hyper-h2 automatically + acknowledges settings changes for efficiency. However, it is possible that + the caller may not be happy with the changed setting. + + When this event is received, the caller should confirm that the new + settings are acceptable. If they are not acceptable, the user should close + the connection with the error code :data:`PROTOCOL_ERROR + `. + + .. versionchanged:: 2.0.0 + Prior to this version the user needed to acknowledge settings changes. + This is no longer the case: hyper-h2 now automatically acknowledges + them. + """ + def __init__(self): + #: A dictionary of setting byte to + #: :class:`ChangedSetting `, representing + #: the changed settings. + self.changed_settings = {} + + @classmethod + def from_settings(cls, old_settings, new_settings): + """ + Build a RemoteSettingsChanged event from a set of changed settings. + + :param old_settings: A complete collection of old settings, in the form + of a dictionary of ``{setting: value}``. + :param new_settings: All the changed settings and their new values, in + the form of a dictionary of ``{setting: value}``. + """ + e = cls() + for setting, new_value in new_settings.items(): + setting = _setting_code_from_int(setting) + original_value = old_settings.get(setting) + change = ChangedSetting(setting, original_value, new_value) + e.changed_settings[setting] = change + + return e + + def __repr__(self): + return "" % ( + ", ".join(repr(cs) for cs in self.changed_settings.values()), + ) + + +class PingReceived(Event): + """ + The PingReceived event is fired whenever a PING is received. It contains + the 'opaque data' of the PING frame. A ping acknowledgment with the same + 'opaque data' is automatically emitted after receiving a ping. + + .. versionadded:: 3.1.0 + """ + def __init__(self): + #: The data included on the ping. + self.ping_data = None + + def __repr__(self): + return "" % ( + _bytes_representation(self.ping_data), + ) + + +class PingAcknowledged(Event): + """ + Same as PingAckReceived. + + .. deprecated:: 3.1.0 + """ + def __init__(self): + #: The data included on the ping. + self.ping_data = None + + def __repr__(self): + return "" % ( + _bytes_representation(self.ping_data), + ) + + +class PingAckReceived(PingAcknowledged): + """ + The PingAckReceived event is fired whenever a PING acknowledgment is + received. It contains the 'opaque data' of the PING+ACK frame, allowing the + user to correlate PINGs and calculate RTT. + + .. versionadded:: 3.1.0 + """ + pass + + +class StreamEnded(Event): + """ + The StreamEnded event is fired whenever a stream is ended by a remote + party. The stream may not be fully closed if it has not been closed + locally, but no further data or headers should be expected on that stream. 
+ """ + def __init__(self): + #: The Stream ID of the stream that was closed. + self.stream_id = None + + def __repr__(self): + return "" % self.stream_id + + +class StreamReset(Event): + """ + The StreamReset event is fired in two situations. The first is when the + remote party forcefully resets the stream. The second is when the remote + party has made a protocol error which only affects a single stream. In this + case, Hyper-h2 will terminate the stream early and return this event. + + .. versionchanged:: 2.0.0 + This event is now fired when Hyper-h2 automatically resets a stream. + """ + def __init__(self): + #: The Stream ID of the stream that was reset. + self.stream_id = None + + #: The error code given. Either one of :class:`ErrorCodes + #: ` or ``int`` + self.error_code = None + + #: Whether the remote peer sent a RST_STREAM or we did. + self.remote_reset = True + + def __repr__(self): + return "" % ( + self.stream_id, self.error_code, self.remote_reset + ) + + +class PushedStreamReceived(Event): + """ + The PushedStreamReceived event is fired whenever a pushed stream has been + received from a remote peer. The event carries on it the new stream ID, the + ID of the parent stream, and the request headers pushed by the remote peer. + """ + def __init__(self): + #: The Stream ID of the stream created by the push. + self.pushed_stream_id = None + + #: The Stream ID of the stream that the push is related to. + self.parent_stream_id = None + + #: The request headers, sent by the remote party in the push. + self.headers = None + + def __repr__(self): + return ( + "" % ( + self.pushed_stream_id, + self.parent_stream_id, + self.headers, + ) + ) + + +class SettingsAcknowledged(Event): + """ + The SettingsAcknowledged event is fired whenever a settings ACK is received + from the remote peer. The event carries on it the settings that were + acknowedged, in the same format as + :class:`h2.events.RemoteSettingsChanged`. + """ + def __init__(self): + #: A dictionary of setting byte to + #: :class:`ChangedSetting `, representing + #: the changed settings. + self.changed_settings = {} + + def __repr__(self): + return "" % ( + ", ".join(repr(cs) for cs in self.changed_settings.values()), + ) + + +class PriorityUpdated(Event): + """ + The PriorityUpdated event is fired whenever a stream sends updated priority + information. This can occur when the stream is opened, or at any time + during the stream lifetime. + + This event is purely advisory, and does not need to be acted on. + + .. versionadded:: 2.0.0 + """ + def __init__(self): + #: The ID of the stream whose priority information is being updated. + self.stream_id = None + + #: The new stream weight. May be the same as the original stream + #: weight. An integer between 1 and 256. + self.weight = None + + #: The stream ID this stream now depends on. May be ``0``. + self.depends_on = None + + #: Whether the stream *exclusively* depends on the parent stream. If it + #: does, this stream should inherit the current children of its new + #: parent. + self.exclusive = None + + def __repr__(self): + return ( + "" % ( + self.stream_id, + self.weight, + self.depends_on, + self.exclusive + ) + ) + + +class ConnectionTerminated(Event): + """ + The ConnectionTerminated event is fired when a connection is torn down by + the remote peer using a GOAWAY frame. Once received, no further action may + be taken on the connection: a new connection must be established. + """ + def __init__(self): + #: The error code cited when tearing down the connection. 
Should be + #: one of :class:`ErrorCodes `, but may not be if + #: unknown HTTP/2 extensions are being used. + self.error_code = None + + #: The stream ID of the last stream the remote peer saw. This can + #: provide an indication of what data, if any, never reached the remote + #: peer and so can safely be resent. + self.last_stream_id = None + + #: Additional debug data that can be appended to GOAWAY frame. + self.additional_data = None + + def __repr__(self): + return ( + "" % ( + self.error_code, + self.last_stream_id, + _bytes_representation( + self.additional_data[:20] + if self.additional_data else None) + ) + ) + + +class AlternativeServiceAvailable(Event): + """ + The AlternativeServiceAvailable event is fired when the remote peer + advertises an `RFC 7838 `_ Alternative + Service using an ALTSVC frame. + + This event always carries the origin to which the ALTSVC information + applies. That origin is either supplied by the server directly, or inferred + by hyper-h2 from the ``:authority`` pseudo-header field that was sent by + the user when initiating a given stream. + + This event also carries what RFC 7838 calls the "Alternative Service Field + Value", which is formatted like a HTTP header field and contains the + relevant alternative service information. Hyper-h2 does not parse or in any + way modify that information: the user is required to do that. + + This event can only be fired on the client end of a connection. + + .. versionadded:: 2.3.0 + """ + def __init__(self): + #: The origin to which the alternative service field value applies. + #: This field is either supplied by the server directly, or inferred by + #: hyper-h2 from the ``:authority`` pseudo-header field that was sent + #: by the user when initiating the stream on which the frame was + #: received. + self.origin = None + + #: The ALTSVC field value. This contains information about the HTTP + #: alternative service being advertised by the server. Hyper-h2 does + #: not parse this field: it is left exactly as sent by the server. The + #: structure of the data in this field is given by `RFC 7838 Section 3 + #: `_. + self.field_value = None + + def __repr__(self): + return ( + "" % ( + self.origin.decode('utf-8', 'ignore'), + self.field_value.decode('utf-8', 'ignore'), + ) + ) + + +class UnknownFrameReceived(Event): + """ + The UnknownFrameReceived event is fired when the remote peer sends a frame + that hyper-h2 does not understand. This occurs primarily when the remote + peer is employing HTTP/2 extensions that hyper-h2 doesn't know anything + about. + + RFC 7540 requires that HTTP/2 implementations ignore these frames. hyper-h2 + does so. However, this event is fired to allow implementations to perform + special processing on those frames if needed (e.g. if the implementation + is capable of handling the frame itself). + + .. versionadded:: 2.7.0 + """ + def __init__(self): + #: The hyperframe Frame object that encapsulates the received frame. + self.frame = None + + def __repr__(self): + return "" + + +def _bytes_representation(data): + """ + Converts a bytestring into something that is safe to print on all Python + platforms. + + This function is relatively expensive, so it should not be called on the + mainline of the code. It's safe to use in things like object repr methods + though. + """ + if data is None: + return None + + hex = binascii.hexlify(data) + + # This is moderately clever: on all Python versions hexlify returns a byte + # string. 
On Python 3 we want an actual string, so we just check whether + # that's what we have. + if not isinstance(hex, str): # pragma: no cover + hex = hex.decode('ascii') + + return hex diff --git a/py3/lib/python3.6/site-packages/h2/exceptions.py b/py3/lib/python3.6/site-packages/h2/exceptions.py new file mode 100644 index 0000000..388f9e9 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2/exceptions.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +""" +h2/exceptions +~~~~~~~~~~~~~ + +Exceptions for the HTTP/2 module. +""" +import h2.errors + + +class H2Error(Exception): + """ + The base class for all exceptions for the HTTP/2 module. + """ + + +class ProtocolError(H2Error): + """ + An action was attempted in violation of the HTTP/2 protocol. + """ + #: The error code corresponds to this kind of Protocol Error. + error_code = h2.errors.ErrorCodes.PROTOCOL_ERROR + + +class FrameTooLargeError(ProtocolError): + """ + The frame that we tried to send or that we received was too large. + """ + #: This error code that corresponds to this kind of Protocol Error. + error_code = h2.errors.ErrorCodes.FRAME_SIZE_ERROR + + +class FrameDataMissingError(ProtocolError): + """ + The frame that we received is missing some data. + + .. versionadded:: 2.0.0 + """ + #: The error code that corresponds to this kind of Protocol Error + error_code = h2.errors.ErrorCodes.FRAME_SIZE_ERROR + + +class TooManyStreamsError(ProtocolError): + """ + An attempt was made to open a stream that would lead to too many concurrent + streams. + """ + pass + + +class FlowControlError(ProtocolError): + """ + An attempted action violates flow control constraints. + """ + #: The error code that corresponds to this kind of + #: :class:`ProtocolError ` + error_code = h2.errors.ErrorCodes.FLOW_CONTROL_ERROR + + +class StreamIDTooLowError(ProtocolError): + """ + An attempt was made to open a stream that had an ID that is lower than the + highest ID we have seen on this connection. + """ + def __init__(self, stream_id, max_stream_id): + #: The ID of the stream that we attempted to open. + self.stream_id = stream_id + + #: The current highest-seen stream ID. + self.max_stream_id = max_stream_id + + def __str__(self): + return "StreamIDTooLowError: %d is lower than %d" % ( + self.stream_id, self.max_stream_id + ) + + +class NoAvailableStreamIDError(ProtocolError): + """ + There are no available stream IDs left to the connection. All stream IDs + have been exhausted. + + .. versionadded:: 2.0.0 + """ + pass + + +class NoSuchStreamError(ProtocolError): + """ + A stream-specific action referenced a stream that does not exist. + + .. versionchanged:: 2.0.0 + Became a subclass of :class:`ProtocolError + ` + """ + def __init__(self, stream_id): + #: The stream ID that corresponds to the non-existent stream. + self.stream_id = stream_id + + +class StreamClosedError(NoSuchStreamError): + """ + A more specific form of + :class:`NoSuchStreamError `. Indicates + that the stream has since been closed, and that all state relating to that + stream has been removed. + """ + def __init__(self, stream_id): + #: The stream ID that corresponds to the nonexistent stream. + self.stream_id = stream_id + + #: The relevant HTTP/2 error code. + self.error_code = h2.errors.ErrorCodes.STREAM_CLOSED + + # Any events that internal code may need to fire. Not relevant to + # external users that may receive a StreamClosedError. + self._events = [] + + +class InvalidSettingsValueError(ProtocolError, ValueError): + """ + An attempt was made to set an invalid Settings value. 
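+
+ The ``error_code`` attribute carries the HTTP/2 error code that is
+ appropriate for rejecting the offending value.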
+ + .. versionadded:: 2.0.0 + """ + def __init__(self, msg, error_code): + super(InvalidSettingsValueError, self).__init__(msg) + self.error_code = error_code + + +class InvalidBodyLengthError(ProtocolError): + """ + The remote peer sent more or less data that the Content-Length header + indicated. + + .. versionadded:: 2.0.0 + """ + def __init__(self, expected, actual): + self.expected_length = expected + self.actual_length = actual + + def __str__(self): + return "InvalidBodyLengthError: Expected %d bytes, received %d" % ( + self.expected_length, self.actual_length + ) + + +class UnsupportedFrameError(ProtocolError, KeyError): + """ + The remote peer sent a frame that is unsupported in this context. + + .. versionadded:: 2.1.0 + """ + # TODO: Remove the KeyError in 3.0.0 + pass + + +class RFC1122Error(H2Error): + """ + Emitted when users attempt to do something that is literally allowed by the + relevant RFC, but is sufficiently ill-defined that it's unwise to allow + users to actually do it. + + While there is some disagreement about whether or not we should be liberal + in what accept, it is a truth universally acknowledged that we should be + conservative in what emit. + + .. versionadded:: 2.4.0 + """ + # shazow says I'm going to regret naming the exception this way. If that + # turns out to be true, TELL HIM NOTHING. + pass + + +class DenialOfServiceError(ProtocolError): + """ + Emitted when the remote peer exhibits a behaviour that is likely to be an + attempt to perform a Denial of Service attack on the implementation. This + is a form of ProtocolError that carries a different error code, and allows + more easy detection of this kind of behaviour. + + .. versionadded:: 2.5.0 + """ + #: The error code that corresponds to this kind of + #: :class:`ProtocolError ` + error_code = h2.errors.ErrorCodes.ENHANCE_YOUR_CALM diff --git a/py3/lib/python3.6/site-packages/h2/frame_buffer.py b/py3/lib/python3.6/site-packages/h2/frame_buffer.py new file mode 100644 index 0000000..e79f6ec --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2/frame_buffer.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- +""" +h2/frame_buffer +~~~~~~~~~~~~~~~ + +A data structure that provides a way to iterate over a byte buffer in terms of +frames. +""" +from hyperframe.exceptions import InvalidFrameError +from hyperframe.frame import ( + Frame, HeadersFrame, ContinuationFrame, PushPromiseFrame +) + +from .exceptions import ( + ProtocolError, FrameTooLargeError, FrameDataMissingError +) + +# To avoid a DOS attack based on sending loads of continuation frames, we limit +# the maximum number we're perpared to receive. In this case, we'll set the +# limit to 64, which means the largest encoded header block we can receive by +# default is 262144 bytes long, and the largest possible *at all* is 1073741760 +# bytes long. +# +# This value seems reasonable for now, but in future we may want to evaluate +# making it configurable. +CONTINUATION_BACKLOG = 64 + + +class FrameBuffer(object): + """ + This is a data structure that expects to act as a buffer for HTTP/2 data + that allows iteraton in terms of H2 frames. + """ + def __init__(self, server=False): + self.data = b'' + self.max_frame_size = 0 + self._preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n' if server else b'' + self._preamble_len = len(self._preamble) + self._headers_buffer = [] + + def add_data(self, data): + """ + Add more data to the frame buffer. + + :param data: A bytestring containing the byte buffer. 
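+
+ For server-side buffers this also consumes and validates the connection
+ preamble before any frame data is buffered.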
+ """ + if self._preamble_len: + data_len = len(data) + of_which_preamble = min(self._preamble_len, data_len) + + if self._preamble[:of_which_preamble] != data[:of_which_preamble]: + raise ProtocolError("Invalid HTTP/2 preamble.") + + data = data[of_which_preamble:] + self._preamble_len -= of_which_preamble + self._preamble = self._preamble[of_which_preamble:] + + self.data += data + + def _parse_frame_header(self, data): + """ + Parses the frame header from the data. Either returns a tuple of + (frame, length), or throws an exception. The returned frame may be None + if the frame is of unknown type. + """ + try: + frame, length = Frame.parse_frame_header(data[:9]) + except ValueError as e: + # The frame header is invalid. This is a ProtocolError + raise ProtocolError("Invalid frame header received: %s" % str(e)) + + return frame, length + + def _validate_frame_length(self, length): + """ + Confirm that the frame is an appropriate length. + """ + if length > self.max_frame_size: + raise FrameTooLargeError( + "Received overlong frame: length %d, max %d" % + (length, self.max_frame_size) + ) + + def _update_header_buffer(self, f): + """ + Updates the internal header buffer. Returns a frame that should replace + the current one. May throw exceptions if this frame is invalid. + """ + # Check if we're in the middle of a headers block. If we are, this + # frame *must* be a CONTINUATION frame with the same stream ID as the + # leading HEADERS or PUSH_PROMISE frame. Anything else is a + # ProtocolError. If the frame *is* valid, append it to the header + # buffer. + if self._headers_buffer: + stream_id = self._headers_buffer[0].stream_id + valid_frame = ( + f is not None and + isinstance(f, ContinuationFrame) and + f.stream_id == stream_id + ) + if not valid_frame: + raise ProtocolError("Invalid frame during header block.") + + # Append the frame to the buffer. + self._headers_buffer.append(f) + if len(self._headers_buffer) > CONTINUATION_BACKLOG: + raise ProtocolError("Too many continuation frames received.") + + # If this is the end of the header block, then we want to build a + # mutant HEADERS frame that's massive. Use the original one we got, + # then set END_HEADERS and set its data appopriately. If it's not + # the end of the block, lose the current frame: we can't yield it. + if 'END_HEADERS' in f.flags: + f = self._headers_buffer[0] + f.flags.add('END_HEADERS') + f.data = b''.join(x.data for x in self._headers_buffer) + self._headers_buffer = [] + else: + f = None + elif (isinstance(f, (HeadersFrame, PushPromiseFrame)) and + 'END_HEADERS' not in f.flags): + # This is the start of a headers block! Save the frame off and then + # act like we didn't receive one. + self._headers_buffer.append(f) + f = None + + return f + + # The methods below support the iterator protocol. + def __iter__(self): + return self + + def next(self): # Python 2 + # First, check that we have enough data to successfully parse the + # next frame header. If not, bail. Otherwise, parse it. + if len(self.data) < 9: + raise StopIteration() + + try: + f, length = self._parse_frame_header(self.data) + except InvalidFrameError: # pragma: no cover + raise ProtocolError("Received frame with invalid frame header.") + + # Next, check that we have enough length to parse the frame body. If + # not, bail, leaving the frame header data in the buffer for next time. + if len(self.data) < length + 9: + raise StopIteration() + + # Confirm the frame has an appropriate length. 
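+ # A frame longer than the negotiated maximum frame size is rejected
+ # with FrameTooLargeError, which carries the FRAME_SIZE_ERROR code.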
+ self._validate_frame_length(length) + + # Don't try to parse the body if we didn't get a frame we know about: + # there's nothing we can do with it anyway. + if f is not None: + try: + f.parse_body(memoryview(self.data[9:9+length])) + except InvalidFrameError: + raise FrameDataMissingError("Frame data missing or invalid") + + # At this point, as we know we'll use or discard the entire frame, we + # can update the data. + self.data = self.data[9+length:] + + # Pass the frame through the header buffer. + f = self._update_header_buffer(f) + + # If we got a frame we didn't understand or shouldn't yield, rather + # than return None it'd be better if we just tried to get the next + # frame in the sequence instead. Recurse back into ourselves to do + # that. This is safe because the amount of work we have to do here is + # strictly bounded by the length of the buffer. + return f if f is not None else self.next() + + def __next__(self): # Python 3 + return self.next() diff --git a/py3/lib/python3.6/site-packages/h2/settings.py b/py3/lib/python3.6/site-packages/h2/settings.py new file mode 100644 index 0000000..bf87c94 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2/settings.py @@ -0,0 +1,339 @@ +# -*- coding: utf-8 -*- +""" +h2/settings +~~~~~~~~~~~ + +This module contains a HTTP/2 settings object. This object provides a simple +API for manipulating HTTP/2 settings, keeping track of both the current active +state of the settings and the unacknowledged future values of the settings. +""" +import collections +import enum + +from hyperframe.frame import SettingsFrame + +from h2.errors import ErrorCodes +from h2.exceptions import InvalidSettingsValueError + +try: + from collections.abc import MutableMapping +except ImportError: # pragma: no cover + # Python 2.7 compatibility + from collections import MutableMapping + + +class SettingCodes(enum.IntEnum): + """ + All known HTTP/2 setting codes. + + .. versionadded:: 2.6.0 + """ + + #: Allows the sender to inform the remote endpoint of the maximum size of + #: the header compression table used to decode header blocks, in octets. + HEADER_TABLE_SIZE = SettingsFrame.HEADER_TABLE_SIZE + + #: This setting can be used to disable server push. To disable server push + #: on a client, set this to 0. + ENABLE_PUSH = SettingsFrame.ENABLE_PUSH + + #: Indicates the maximum number of concurrent streams that the sender will + #: allow. + MAX_CONCURRENT_STREAMS = SettingsFrame.MAX_CONCURRENT_STREAMS + + #: Indicates the sender's initial window size (in octets) for stream-level + #: flow control. + INITIAL_WINDOW_SIZE = SettingsFrame.INITIAL_WINDOW_SIZE + + #: Indicates the size of the largest frame payload that the sender is + #: willing to receive, in octets. + MAX_FRAME_SIZE = SettingsFrame.MAX_FRAME_SIZE + + #: This advisory setting informs a peer of the maximum size of header list + #: that the sender is prepared to accept, in octets. The value is based on + #: the uncompressed size of header fields, including the length of the name + #: and value in octets plus an overhead of 32 octets for each header field. + MAX_HEADER_LIST_SIZE = SettingsFrame.MAX_HEADER_LIST_SIZE + + #: This setting can be used to enable the connect protocol. To enable on a + #: client set this to 1. + ENABLE_CONNECT_PROTOCOL = SettingsFrame.ENABLE_CONNECT_PROTOCOL + + +def _setting_code_from_int(code): + """ + Given an integer setting code, returns either one of :class:`SettingCodes + ` or, if not present in the known set of codes, + returns the integer directly. 
+ """ + try: + return SettingCodes(code) + except ValueError: + return code + + +class ChangedSetting: + + def __init__(self, setting, original_value, new_value): + #: The setting code given. Either one of :class:`SettingCodes + #: ` or ``int`` + #: + #: .. versionchanged:: 2.6.0 + self.setting = setting + + #: The original value before being changed. + self.original_value = original_value + + #: The new value after being changed. + self.new_value = new_value + + def __repr__(self): + return ( + "ChangedSetting(setting=%s, original_value=%s, " + "new_value=%s)" + ) % ( + self.setting, + self.original_value, + self.new_value + ) + + +class Settings(MutableMapping): + """ + An object that encapsulates HTTP/2 settings state. + + HTTP/2 Settings are a complex beast. Each party, remote and local, has its + own settings and a view of the other party's settings. When a settings + frame is emitted by a peer it cannot assume that the new settings values + are in place until the remote peer acknowledges the setting. In principle, + multiple settings changes can be "in flight" at the same time, all with + different values. + + This object encapsulates this mess. It provides a dict-like interface to + settings, which return the *current* values of the settings in question. + Additionally, it keeps track of the stack of proposed values: each time an + acknowledgement is sent/received, it updates the current values with the + stack of proposed values. On top of all that, it validates the values to + make sure they're allowed, and raises :class:`InvalidSettingsValueError + ` if they are not. + + Finally, this object understands what the default values of the HTTP/2 + settings are, and sets those defaults appropriately. + + .. versionchanged:: 2.2.0 + Added the ``initial_values`` parameter. + + .. versionchanged:: 2.5.0 + Added the ``max_header_list_size`` property. + + :param client: (optional) Whether these settings should be defaulted for a + client implementation or a server implementation. Defaults to ``True``. + :type client: ``bool`` + :param initial_values: (optional) Any initial values the user would like + set, rather than RFC 7540's defaults. + :type initial_vales: ``MutableMapping`` + """ + def __init__(self, client=True, initial_values=None): + # Backing object for the settings. This is a dictionary of + # (setting: [list of values]), where the first value in the list is the + # current value of the setting. Strictly this doesn't use lists but + # instead uses collections.deque to avoid repeated memory allocations. + # + # This contains the default values for HTTP/2. + self._settings = { + SettingCodes.HEADER_TABLE_SIZE: collections.deque([4096]), + SettingCodes.ENABLE_PUSH: collections.deque([int(client)]), + SettingCodes.INITIAL_WINDOW_SIZE: collections.deque([65535]), + SettingCodes.MAX_FRAME_SIZE: collections.deque([16384]), + SettingCodes.ENABLE_CONNECT_PROTOCOL: collections.deque([0]), + } + if initial_values is not None: + for key, value in initial_values.items(): + invalid = _validate_setting(key, value) + if invalid: + raise InvalidSettingsValueError( + "Setting %d has invalid value %d" % (key, value), + error_code=invalid + ) + self._settings[key] = collections.deque([value]) + + def acknowledge(self): + """ + The settings have been acknowledged, either by the user (remote + settings) or by the remote peer (local settings). + + :returns: A dict of {setting: ChangedSetting} that were applied. 
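+
+ Only settings with an unacknowledged value outstanding appear in the
+ returned dict; settings without a pending change are left untouched.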
+ """ + changed_settings = {} + + # If there is more than one setting in the list, we have a setting + # value outstanding. Update them. + for k, v in self._settings.items(): + if len(v) > 1: + old_setting = v.popleft() + new_setting = v[0] + changed_settings[k] = ChangedSetting( + k, old_setting, new_setting + ) + + return changed_settings + + # Provide easy-access to well known settings. + @property + def header_table_size(self): + """ + The current value of the :data:`HEADER_TABLE_SIZE + ` setting. + """ + return self[SettingCodes.HEADER_TABLE_SIZE] + + @header_table_size.setter + def header_table_size(self, value): + self[SettingCodes.HEADER_TABLE_SIZE] = value + + @property + def enable_push(self): + """ + The current value of the :data:`ENABLE_PUSH + ` setting. + """ + return self[SettingCodes.ENABLE_PUSH] + + @enable_push.setter + def enable_push(self, value): + self[SettingCodes.ENABLE_PUSH] = value + + @property + def initial_window_size(self): + """ + The current value of the :data:`INITIAL_WINDOW_SIZE + ` setting. + """ + return self[SettingCodes.INITIAL_WINDOW_SIZE] + + @initial_window_size.setter + def initial_window_size(self, value): + self[SettingCodes.INITIAL_WINDOW_SIZE] = value + + @property + def max_frame_size(self): + """ + The current value of the :data:`MAX_FRAME_SIZE + ` setting. + """ + return self[SettingCodes.MAX_FRAME_SIZE] + + @max_frame_size.setter + def max_frame_size(self, value): + self[SettingCodes.MAX_FRAME_SIZE] = value + + @property + def max_concurrent_streams(self): + """ + The current value of the :data:`MAX_CONCURRENT_STREAMS + ` setting. + """ + return self.get(SettingCodes.MAX_CONCURRENT_STREAMS, 2**32+1) + + @max_concurrent_streams.setter + def max_concurrent_streams(self, value): + self[SettingCodes.MAX_CONCURRENT_STREAMS] = value + + @property + def max_header_list_size(self): + """ + The current value of the :data:`MAX_HEADER_LIST_SIZE + ` setting. If not set, + returns ``None``, which means unlimited. + + .. versionadded:: 2.5.0 + """ + return self.get(SettingCodes.MAX_HEADER_LIST_SIZE, None) + + @max_header_list_size.setter + def max_header_list_size(self, value): + self[SettingCodes.MAX_HEADER_LIST_SIZE] = value + + @property + def enable_connect_protocol(self): + """ + The current value of the :data:`ENABLE_CONNECT_PROTOCOL + ` setting. + """ + return self[SettingCodes.ENABLE_CONNECT_PROTOCOL] + + @enable_connect_protocol.setter + def enable_connect_protocol(self, value): + self[SettingCodes.ENABLE_CONNECT_PROTOCOL] = value + + # Implement the MutableMapping API. + def __getitem__(self, key): + val = self._settings[key][0] + + # Things that were created when a setting was received should stay + # KeyError'd. 
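+ # __setitem__ seeds previously-unknown settings with a None placeholder,
+ # so a current value of None means the proposed value has not been
+ # acknowledged yet and the setting should behave as absent.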
+ if val is None: + raise KeyError + + return val + + def __setitem__(self, key, value): + invalid = _validate_setting(key, value) + if invalid: + raise InvalidSettingsValueError( + "Setting %d has invalid value %d" % (key, value), + error_code=invalid + ) + + try: + items = self._settings[key] + except KeyError: + items = collections.deque([None]) + self._settings[key] = items + + items.append(value) + + def __delitem__(self, key): + del self._settings[key] + + def __iter__(self): + return self._settings.__iter__() + + def __len__(self): + return len(self._settings) + + def __eq__(self, other): + if isinstance(other, Settings): + return self._settings == other._settings + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, Settings): + return not self == other + else: + return NotImplemented + + +def _validate_setting(setting, value): # noqa: C901 + """ + Confirms that a specific setting has a well-formed value. If the setting is + invalid, returns an error code. Otherwise, returns 0 (NO_ERROR). + """ + if setting == SettingCodes.ENABLE_PUSH: + if value not in (0, 1): + return ErrorCodes.PROTOCOL_ERROR + elif setting == SettingCodes.INITIAL_WINDOW_SIZE: + if not 0 <= value <= 2147483647: # 2^31 - 1 + return ErrorCodes.FLOW_CONTROL_ERROR + elif setting == SettingCodes.MAX_FRAME_SIZE: + if not 16384 <= value <= 16777215: # 2^14 and 2^24 - 1 + return ErrorCodes.PROTOCOL_ERROR + elif setting == SettingCodes.MAX_HEADER_LIST_SIZE: + if value < 0: + return ErrorCodes.PROTOCOL_ERROR + elif setting == SettingCodes.ENABLE_CONNECT_PROTOCOL: + if value not in (0, 1): + return ErrorCodes.PROTOCOL_ERROR + + return 0 diff --git a/py3/lib/python3.6/site-packages/h2/stream.py b/py3/lib/python3.6/site-packages/h2/stream.py new file mode 100644 index 0000000..1cb9178 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2/stream.py @@ -0,0 +1,1369 @@ +# -*- coding: utf-8 -*- +""" +h2/stream +~~~~~~~~~ + +An implementation of a HTTP/2 stream. 
+""" +from enum import Enum, IntEnum +from hpack import HeaderTuple +from hyperframe.frame import ( + HeadersFrame, ContinuationFrame, DataFrame, WindowUpdateFrame, + RstStreamFrame, PushPromiseFrame, AltSvcFrame +) + +from .errors import ErrorCodes, _error_code_from_int +from .events import ( + RequestReceived, ResponseReceived, DataReceived, WindowUpdated, + StreamEnded, PushedStreamReceived, StreamReset, TrailersReceived, + InformationalResponseReceived, AlternativeServiceAvailable, + _ResponseSent, _RequestSent, _TrailersSent, _PushedRequestSent +) +from .exceptions import ( + ProtocolError, StreamClosedError, InvalidBodyLengthError, FlowControlError +) +from .utilities import ( + guard_increment_window, is_informational_response, authority_from_headers, + validate_headers, validate_outbound_headers, normalize_outbound_headers, + HeaderValidationFlags, extract_method_header, normalize_inbound_headers +) +from .windows import WindowManager + + +class StreamState(IntEnum): + IDLE = 0 + RESERVED_REMOTE = 1 + RESERVED_LOCAL = 2 + OPEN = 3 + HALF_CLOSED_REMOTE = 4 + HALF_CLOSED_LOCAL = 5 + CLOSED = 6 + + +class StreamInputs(Enum): + SEND_HEADERS = 0 + SEND_PUSH_PROMISE = 1 + SEND_RST_STREAM = 2 + SEND_DATA = 3 + SEND_WINDOW_UPDATE = 4 + SEND_END_STREAM = 5 + RECV_HEADERS = 6 + RECV_PUSH_PROMISE = 7 + RECV_RST_STREAM = 8 + RECV_DATA = 9 + RECV_WINDOW_UPDATE = 10 + RECV_END_STREAM = 11 + RECV_CONTINUATION = 12 # Added in 2.0.0 + SEND_INFORMATIONAL_HEADERS = 13 # Added in 2.2.0 + RECV_INFORMATIONAL_HEADERS = 14 # Added in 2.2.0 + SEND_ALTERNATIVE_SERVICE = 15 # Added in 2.3.0 + RECV_ALTERNATIVE_SERVICE = 16 # Added in 2.3.0 + UPGRADE_CLIENT = 17 # Added 2.3.0 + UPGRADE_SERVER = 18 # Added 2.3.0 + + +class StreamClosedBy(Enum): + SEND_END_STREAM = 0 + RECV_END_STREAM = 1 + SEND_RST_STREAM = 2 + RECV_RST_STREAM = 3 + + +# This array is initialized once, and is indexed by the stream states above. +# It indicates whether a stream in the given state is open. The reason we do +# this is that we potentially check whether a stream in a given state is open +# quite frequently: given that we check so often, we should do so in the +# fastest and most performant way possible. +STREAM_OPEN = [False for _ in range(0, len(StreamState))] +STREAM_OPEN[StreamState.OPEN] = True +STREAM_OPEN[StreamState.HALF_CLOSED_LOCAL] = True +STREAM_OPEN[StreamState.HALF_CLOSED_REMOTE] = True + + +class H2StreamStateMachine(object): + """ + A single HTTP/2 stream state machine. + + This stream object implements basically the state machine described in + RFC 7540 section 5.1. + + :param stream_id: The stream ID of this stream. This is stored primarily + for logging purposes. + """ + def __init__(self, stream_id): + self.state = StreamState.IDLE + self.stream_id = stream_id + + #: Whether this peer is the client side of this stream. + self.client = None + + # Whether trailers have been sent/received on this stream or not. + self.headers_sent = None + self.trailers_sent = None + self.headers_received = None + self.trailers_received = None + + # How the stream was closed. One of StreamClosedBy. + self.stream_closed_by = None + + def process_input(self, input_): + """ + Process a specific input in the state machine. 
+ """ + if not isinstance(input_, StreamInputs): + raise ValueError("Input must be an instance of StreamInputs") + + try: + func, target_state = _transitions[(self.state, input_)] + except KeyError: + old_state = self.state + self.state = StreamState.CLOSED + raise ProtocolError( + "Invalid input %s in state %s" % (input_, old_state) + ) + else: + previous_state = self.state + self.state = target_state + if func is not None: + try: + return func(self, previous_state) + except ProtocolError: + self.state = StreamState.CLOSED + raise + except AssertionError as e: # pragma: no cover + self.state = StreamState.CLOSED + raise ProtocolError(e) + + return [] + + def request_sent(self, previous_state): + """ + Fires when a request is sent. + """ + self.client = True + self.headers_sent = True + event = _RequestSent() + + return [event] + + def response_sent(self, previous_state): + """ + Fires when something that should be a response is sent. This 'response' + may actually be trailers. + """ + if not self.headers_sent: + if self.client is True or self.client is None: + raise ProtocolError("Client cannot send responses.") + self.headers_sent = True + event = _ResponseSent() + else: + assert not self.trailers_sent + self.trailers_sent = True + event = _TrailersSent() + + return [event] + + def request_received(self, previous_state): + """ + Fires when a request is received. + """ + assert not self.headers_received + assert not self.trailers_received + + self.client = False + self.headers_received = True + event = RequestReceived() + + event.stream_id = self.stream_id + return [event] + + def response_received(self, previous_state): + """ + Fires when a response is received. Also disambiguates between responses + and trailers. + """ + if not self.headers_received: + assert self.client is True + self.headers_received = True + event = ResponseReceived() + else: + assert not self.trailers_received + self.trailers_received = True + event = TrailersReceived() + + event.stream_id = self.stream_id + return [event] + + def data_received(self, previous_state): + """ + Fires when data is received. + """ + event = DataReceived() + event.stream_id = self.stream_id + return [event] + + def window_updated(self, previous_state): + """ + Fires when a window update frame is received. + """ + event = WindowUpdated() + event.stream_id = self.stream_id + return [event] + + def stream_half_closed(self, previous_state): + """ + Fires when an END_STREAM flag is received in the OPEN state, + transitioning this stream to a HALF_CLOSED_REMOTE state. + """ + event = StreamEnded() + event.stream_id = self.stream_id + return [event] + + def stream_ended(self, previous_state): + """ + Fires when a stream is cleanly ended. + """ + self.stream_closed_by = StreamClosedBy.RECV_END_STREAM + event = StreamEnded() + event.stream_id = self.stream_id + return [event] + + def stream_reset(self, previous_state): + """ + Fired when a stream is forcefully reset. + """ + self.stream_closed_by = StreamClosedBy.RECV_RST_STREAM + event = StreamReset() + event.stream_id = self.stream_id + return [event] + + def send_new_pushed_stream(self, previous_state): + """ + Fires on the newly pushed stream, when pushed by the local peer. + + No event here, but definitionally this peer must be a server. + """ + assert self.client is None + self.client = False + self.headers_received = True + return [] + + def recv_new_pushed_stream(self, previous_state): + """ + Fires on the newly pushed stream, when pushed by the remote peer. 
+ + No event here, but definitionally this peer must be a client. + """ + assert self.client is None + self.client = True + self.headers_sent = True + return [] + + def send_push_promise(self, previous_state): + """ + Fires on the already-existing stream when a PUSH_PROMISE frame is sent. + We may only send PUSH_PROMISE frames if we're a server. + """ + if self.client is True: + raise ProtocolError("Cannot push streams from client peers.") + + event = _PushedRequestSent() + return [event] + + def recv_push_promise(self, previous_state): + """ + Fires on the already-existing stream when a PUSH_PROMISE frame is + received. We may only receive PUSH_PROMISE frames if we're a client. + + Fires a PushedStreamReceived event. + """ + if not self.client: + if self.client is None: # pragma: no cover + msg = "Idle streams cannot receive pushes" + else: # pragma: no cover + msg = "Cannot receive pushed streams as a server" + raise ProtocolError(msg) + + event = PushedStreamReceived() + event.parent_stream_id = self.stream_id + return [event] + + def send_end_stream(self, previous_state): + """ + Called when an attempt is made to send END_STREAM in the + HALF_CLOSED_REMOTE state. + """ + self.stream_closed_by = StreamClosedBy.SEND_END_STREAM + + def send_reset_stream(self, previous_state): + """ + Called when an attempt is made to send RST_STREAM in a non-closed + stream state. + """ + self.stream_closed_by = StreamClosedBy.SEND_RST_STREAM + + def reset_stream_on_error(self, previous_state): + """ + Called when we need to forcefully emit another RST_STREAM frame on + behalf of the state machine. + + If this is the first time we've done this, we should also hang an event + off the StreamClosedError so that the user can be informed. We know + it's the first time we've done this if the stream is currently in a + state other than CLOSED. + """ + self.stream_closed_by = StreamClosedBy.SEND_RST_STREAM + + error = StreamClosedError(self.stream_id) + + event = StreamReset() + event.stream_id = self.stream_id + event.error_code = ErrorCodes.STREAM_CLOSED + event.remote_reset = False + error._events = [event] + raise error + + def recv_on_closed_stream(self, previous_state): + """ + Called when an unexpected frame is received on an already-closed + stream. + + An endpoint that receives an unexpected frame should treat it as + a stream error or connection error with type STREAM_CLOSED, depending + on the specific frame. The error handling is done at a higher level: + this just raises the appropriate error. + """ + raise StreamClosedError(self.stream_id) + + def send_on_closed_stream(self, previous_state): + """ + Called when an attempt is made to send data on an already-closed + stream. + + This essentially overrides the standard logic by throwing a + more-specific error: StreamClosedError. This is a ProtocolError, so it + matches the standard API of the state machine, but provides more detail + to the user. + """ + raise StreamClosedError(self.stream_id) + + def recv_push_on_closed_stream(self, previous_state): + """ + Called when a PUSH_PROMISE frame is received on a full stop + stream. + + If the stream was closed by us sending a RST_STREAM frame, then we + presume that the PUSH_PROMISE was in flight when we reset the parent + stream. Rathen than accept the new stream, we just reset it. + Otherwise, we should call this a PROTOCOL_ERROR: pushing a stream on a + naturally closed stream is a real problem because it creates a brand + new stream that the remote peer now believes exists. 
+ """ + assert self.stream_closed_by is not None + + if self.stream_closed_by == StreamClosedBy.SEND_RST_STREAM: + raise StreamClosedError(self.stream_id) + else: + raise ProtocolError("Attempted to push on closed stream.") + + def send_push_on_closed_stream(self, previous_state): + """ + Called when an attempt is made to push on an already-closed stream. + + This essentially overrides the standard logic by providing a more + useful error message. It's necessary because simply indicating that the + stream is closed is not enough: there is now a new stream that is not + allowed to be there. The only recourse is to tear the whole connection + down. + """ + raise ProtocolError("Attempted to push on closed stream.") + + def send_informational_response(self, previous_state): + """ + Called when an informational header block is sent (that is, a block + where the :status header has a 1XX value). + + Only enforces that these are sent *before* final headers are sent. + """ + if self.headers_sent: + raise ProtocolError("Information response after final response") + + event = _ResponseSent() + return [event] + + def recv_informational_response(self, previous_state): + """ + Called when an informational header block is received (that is, a block + where the :status header has a 1XX value). + """ + if self.headers_received: + raise ProtocolError("Informational response after final response") + + event = InformationalResponseReceived() + event.stream_id = self.stream_id + return [event] + + def recv_alt_svc(self, previous_state): + """ + Called when receiving an ALTSVC frame. + + RFC 7838 allows us to receive ALTSVC frames at any stream state, which + is really absurdly overzealous. For that reason, we want to limit the + states in which we can actually receive it. It's really only sensible + to receive it after we've sent our own headers and before the server + has sent its header block: the server can't guarantee that we have any + state around after it completes its header block, and the server + doesn't know what origin we're talking about before we've sent ours. + + For that reason, this function applies a few extra checks on both state + and some of the little state variables we keep around. If those suggest + an unreasonable situation for the ALTSVC frame to have been sent in, + we quietly ignore it (as RFC 7838 suggests). + + This function is also *not* always called by the state machine. In some + states (IDLE, RESERVED_LOCAL, CLOSED) we don't bother to call it, + because we know the frame cannot be valid in that state (IDLE because + the server cannot know what origin the stream applies to, CLOSED + because the server cannot assume we still have state around, + RESERVED_LOCAL because by definition if we're in the RESERVED_LOCAL + state then *we* are the server). + """ + # Servers can't receive ALTSVC frames, but RFC 7838 tells us to ignore + # them. + if self.client is False: + return [] + + # If we've received the response headers from the server they can't + # guarantee we still have any state around. Other implementations + # (like nghttp2) ignore ALTSVC in this state, so we will too. + if self.headers_received: + return [] + + # Otherwise, this is a sensible enough frame to have received. Return + # the event and let it get populated. + return [AlternativeServiceAvailable()] + + def send_alt_svc(self, previous_state): + """ + Called when sending an ALTSVC frame on this stream. 
+ + For consistency with the restrictions we apply on receiving ALTSVC + frames in ``recv_alt_svc``, we want to restrict when users can send + ALTSVC frames to the situations when we ourselves would accept them. + + That means: when we are a server, when we have received the request + headers, and when we have not yet sent our own response headers. + """ + # We should not send ALTSVC after we've sent response headers, as the + # client may have disposed of its state. + if self.headers_sent: + raise ProtocolError( + "Cannot send ALTSVC after sending response headers." + ) + + return + + +# STATE MACHINE +# +# The stream state machine is defined here to avoid the need to allocate it +# repeatedly for each stream. It cannot be defined in the stream class because +# it needs to be able to reference the callbacks defined on the class, but +# because Python's scoping rules are weird the class object is not actually in +# scope during the body of the class object. +# +# For the sake of clarity, we reproduce the RFC 7540 state machine here: +# +# +--------+ +# send PP | | recv PP +# ,--------| idle |--------. +# / | | \ +# v +--------+ v +# +----------+ | +----------+ +# | | | send H / | | +# ,------| reserved | | recv H | reserved |------. +# | | (local) | | | (remote) | | +# | +----------+ v +----------+ | +# | | +--------+ | | +# | | recv ES | | send ES | | +# | send H | ,-------| open |-------. | recv H | +# | | / | | \ | | +# | v v +--------+ v v | +# | +----------+ | +----------+ | +# | | half | | | half | | +# | | closed | | send R / | closed | | +# | | (remote) | | recv R | (local) | | +# | +----------+ | +----------+ | +# | | | | | +# | | send ES / | recv ES / | | +# | | send R / v send R / | | +# | | recv R +--------+ recv R | | +# | send R / `----------->| |<-----------' send R / | +# | recv R | closed | recv R | +# `----------------------->| |<----------------------' +# +--------+ +# +# send: endpoint sends this frame +# recv: endpoint receives this frame +# +# H: HEADERS frame (with implied CONTINUATIONs) +# PP: PUSH_PROMISE frame (with implied CONTINUATIONs) +# ES: END_STREAM flag +# R: RST_STREAM frame +# +# For the purposes of this state machine we treat HEADERS and their +# associated CONTINUATION frames as a single jumbo frame. The protocol +# allows/requires this by preventing other frames from being interleved in +# between HEADERS/CONTINUATION frames. However, if a CONTINUATION frame is +# received without a prior HEADERS frame, it *will* be passed to this state +# machine. The state machine should always reject that frame, either as an +# invalid transition or because the stream is closed. +# +# There is a confusing relationship around PUSH_PROMISE frames. The state +# machine above considers them to be frames belonging to the new stream, +# which is *somewhat* true. However, they are sent with the stream ID of +# their related stream, and are only sendable in some cases. +# For this reason, our state machine implementation below allows for +# PUSH_PROMISE frames both in the IDLE state (as in the diagram), but also +# in the OPEN, HALF_CLOSED_LOCAL, and HALF_CLOSED_REMOTE states. +# Essentially, for hyper-h2, PUSH_PROMISE frames are effectively sent on +# two streams. +# +# The _transitions dictionary contains a mapping of tuples of +# (state, input) to tuples of (side_effect_function, end_state). This +# map contains all allowed transitions: anything not in this map is +# invalid and immediately causes a transition to ``closed``. 
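# A short sketch of how process_input() (defined above) consults the table
# that follows; the names are the module's own, but these driver lines are
# illustrative only.
machine = H2StreamStateMachine(stream_id=1)
previous = machine.state                                   # StreamState.IDLE
func, machine.state = _transitions[(previous, StreamInputs.SEND_HEADERS)]
events = func(machine, previous) if func is not None else []
# func was H2StreamStateMachine.request_sent, so the stream is now in
# StreamState.OPEN and events carries a single _RequestSent event.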
+_transitions = { + # State: idle + (StreamState.IDLE, StreamInputs.SEND_HEADERS): + (H2StreamStateMachine.request_sent, StreamState.OPEN), + (StreamState.IDLE, StreamInputs.RECV_HEADERS): + (H2StreamStateMachine.request_received, StreamState.OPEN), + (StreamState.IDLE, StreamInputs.RECV_DATA): + (H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED), + (StreamState.IDLE, StreamInputs.SEND_PUSH_PROMISE): + (H2StreamStateMachine.send_new_pushed_stream, + StreamState.RESERVED_LOCAL), + (StreamState.IDLE, StreamInputs.RECV_PUSH_PROMISE): + (H2StreamStateMachine.recv_new_pushed_stream, + StreamState.RESERVED_REMOTE), + (StreamState.IDLE, StreamInputs.RECV_ALTERNATIVE_SERVICE): + (None, StreamState.IDLE), + (StreamState.IDLE, StreamInputs.UPGRADE_CLIENT): + (H2StreamStateMachine.request_sent, StreamState.HALF_CLOSED_LOCAL), + (StreamState.IDLE, StreamInputs.UPGRADE_SERVER): + (H2StreamStateMachine.request_received, + StreamState.HALF_CLOSED_REMOTE), + + # State: reserved local + (StreamState.RESERVED_LOCAL, StreamInputs.SEND_HEADERS): + (H2StreamStateMachine.response_sent, StreamState.HALF_CLOSED_REMOTE), + (StreamState.RESERVED_LOCAL, StreamInputs.RECV_DATA): + (H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED), + (StreamState.RESERVED_LOCAL, StreamInputs.SEND_WINDOW_UPDATE): + (None, StreamState.RESERVED_LOCAL), + (StreamState.RESERVED_LOCAL, StreamInputs.RECV_WINDOW_UPDATE): + (H2StreamStateMachine.window_updated, StreamState.RESERVED_LOCAL), + (StreamState.RESERVED_LOCAL, StreamInputs.SEND_RST_STREAM): + (H2StreamStateMachine.send_reset_stream, StreamState.CLOSED), + (StreamState.RESERVED_LOCAL, StreamInputs.RECV_RST_STREAM): + (H2StreamStateMachine.stream_reset, StreamState.CLOSED), + (StreamState.RESERVED_LOCAL, StreamInputs.SEND_ALTERNATIVE_SERVICE): + (H2StreamStateMachine.send_alt_svc, StreamState.RESERVED_LOCAL), + (StreamState.RESERVED_LOCAL, StreamInputs.RECV_ALTERNATIVE_SERVICE): + (None, StreamState.RESERVED_LOCAL), + + # State: reserved remote + (StreamState.RESERVED_REMOTE, StreamInputs.RECV_HEADERS): + (H2StreamStateMachine.response_received, + StreamState.HALF_CLOSED_LOCAL), + (StreamState.RESERVED_REMOTE, StreamInputs.RECV_DATA): + (H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED), + (StreamState.RESERVED_REMOTE, StreamInputs.SEND_WINDOW_UPDATE): + (None, StreamState.RESERVED_REMOTE), + (StreamState.RESERVED_REMOTE, StreamInputs.RECV_WINDOW_UPDATE): + (H2StreamStateMachine.window_updated, StreamState.RESERVED_REMOTE), + (StreamState.RESERVED_REMOTE, StreamInputs.SEND_RST_STREAM): + (H2StreamStateMachine.send_reset_stream, StreamState.CLOSED), + (StreamState.RESERVED_REMOTE, StreamInputs.RECV_RST_STREAM): + (H2StreamStateMachine.stream_reset, StreamState.CLOSED), + (StreamState.RESERVED_REMOTE, StreamInputs.RECV_ALTERNATIVE_SERVICE): + (H2StreamStateMachine.recv_alt_svc, StreamState.RESERVED_REMOTE), + + # State: open + (StreamState.OPEN, StreamInputs.SEND_HEADERS): + (H2StreamStateMachine.response_sent, StreamState.OPEN), + (StreamState.OPEN, StreamInputs.RECV_HEADERS): + (H2StreamStateMachine.response_received, StreamState.OPEN), + (StreamState.OPEN, StreamInputs.SEND_DATA): + (None, StreamState.OPEN), + (StreamState.OPEN, StreamInputs.RECV_DATA): + (H2StreamStateMachine.data_received, StreamState.OPEN), + (StreamState.OPEN, StreamInputs.SEND_END_STREAM): + (None, StreamState.HALF_CLOSED_LOCAL), + (StreamState.OPEN, StreamInputs.RECV_END_STREAM): + (H2StreamStateMachine.stream_half_closed, + StreamState.HALF_CLOSED_REMOTE), + 
(StreamState.OPEN, StreamInputs.SEND_WINDOW_UPDATE): + (None, StreamState.OPEN), + (StreamState.OPEN, StreamInputs.RECV_WINDOW_UPDATE): + (H2StreamStateMachine.window_updated, StreamState.OPEN), + (StreamState.OPEN, StreamInputs.SEND_RST_STREAM): + (H2StreamStateMachine.send_reset_stream, StreamState.CLOSED), + (StreamState.OPEN, StreamInputs.RECV_RST_STREAM): + (H2StreamStateMachine.stream_reset, StreamState.CLOSED), + (StreamState.OPEN, StreamInputs.SEND_PUSH_PROMISE): + (H2StreamStateMachine.send_push_promise, StreamState.OPEN), + (StreamState.OPEN, StreamInputs.RECV_PUSH_PROMISE): + (H2StreamStateMachine.recv_push_promise, StreamState.OPEN), + (StreamState.OPEN, StreamInputs.SEND_INFORMATIONAL_HEADERS): + (H2StreamStateMachine.send_informational_response, StreamState.OPEN), + (StreamState.OPEN, StreamInputs.RECV_INFORMATIONAL_HEADERS): + (H2StreamStateMachine.recv_informational_response, StreamState.OPEN), + (StreamState.OPEN, StreamInputs.SEND_ALTERNATIVE_SERVICE): + (H2StreamStateMachine.send_alt_svc, StreamState.OPEN), + (StreamState.OPEN, StreamInputs.RECV_ALTERNATIVE_SERVICE): + (H2StreamStateMachine.recv_alt_svc, StreamState.OPEN), + + # State: half-closed remote + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_HEADERS): + (H2StreamStateMachine.response_sent, StreamState.HALF_CLOSED_REMOTE), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_HEADERS): + (H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_DATA): + (None, StreamState.HALF_CLOSED_REMOTE), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_DATA): + (H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_END_STREAM): + (H2StreamStateMachine.send_end_stream, StreamState.CLOSED), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_WINDOW_UPDATE): + (None, StreamState.HALF_CLOSED_REMOTE), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_WINDOW_UPDATE): + (H2StreamStateMachine.window_updated, StreamState.HALF_CLOSED_REMOTE), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_RST_STREAM): + (H2StreamStateMachine.send_reset_stream, StreamState.CLOSED), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_RST_STREAM): + (H2StreamStateMachine.stream_reset, StreamState.CLOSED), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_PUSH_PROMISE): + (H2StreamStateMachine.send_push_promise, + StreamState.HALF_CLOSED_REMOTE), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_PUSH_PROMISE): + (H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_INFORMATIONAL_HEADERS): + (H2StreamStateMachine.send_informational_response, + StreamState.HALF_CLOSED_REMOTE), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_ALTERNATIVE_SERVICE): + (H2StreamStateMachine.send_alt_svc, StreamState.HALF_CLOSED_REMOTE), + (StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_ALTERNATIVE_SERVICE): + (H2StreamStateMachine.recv_alt_svc, StreamState.HALF_CLOSED_REMOTE), + + # State: half-closed local + (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_HEADERS): + (H2StreamStateMachine.response_received, + StreamState.HALF_CLOSED_LOCAL), + (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_DATA): + (H2StreamStateMachine.data_received, StreamState.HALF_CLOSED_LOCAL), + (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_END_STREAM): + (H2StreamStateMachine.stream_ended, StreamState.CLOSED), + (StreamState.HALF_CLOSED_LOCAL, StreamInputs.SEND_WINDOW_UPDATE): 
+ (None, StreamState.HALF_CLOSED_LOCAL), + (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_WINDOW_UPDATE): + (H2StreamStateMachine.window_updated, StreamState.HALF_CLOSED_LOCAL), + (StreamState.HALF_CLOSED_LOCAL, StreamInputs.SEND_RST_STREAM): + (H2StreamStateMachine.send_reset_stream, StreamState.CLOSED), + (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_RST_STREAM): + (H2StreamStateMachine.stream_reset, StreamState.CLOSED), + (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_PUSH_PROMISE): + (H2StreamStateMachine.recv_push_promise, + StreamState.HALF_CLOSED_LOCAL), + (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_INFORMATIONAL_HEADERS): + (H2StreamStateMachine.recv_informational_response, + StreamState.HALF_CLOSED_LOCAL), + (StreamState.HALF_CLOSED_LOCAL, StreamInputs.SEND_ALTERNATIVE_SERVICE): + (H2StreamStateMachine.send_alt_svc, StreamState.HALF_CLOSED_LOCAL), + (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_ALTERNATIVE_SERVICE): + (H2StreamStateMachine.recv_alt_svc, StreamState.HALF_CLOSED_LOCAL), + + # State: closed + (StreamState.CLOSED, StreamInputs.RECV_END_STREAM): + (None, StreamState.CLOSED), + (StreamState.CLOSED, StreamInputs.RECV_ALTERNATIVE_SERVICE): + (None, StreamState.CLOSED), + + # RFC 7540 Section 5.1 defines how the end point should react when + # receiving a frame on a closed stream with the following statements: + # + # > An endpoint that receives any frame other than PRIORITY after receiving + # > a RST_STREAM MUST treat that as a stream error of type STREAM_CLOSED. + # > An endpoint that receives any frames after receiving a frame with the + # > END_STREAM flag set MUST treat that as a connection error of type + # > STREAM_CLOSED. + (StreamState.CLOSED, StreamInputs.RECV_HEADERS): + (H2StreamStateMachine.recv_on_closed_stream, StreamState.CLOSED), + (StreamState.CLOSED, StreamInputs.RECV_DATA): + (H2StreamStateMachine.recv_on_closed_stream, StreamState.CLOSED), + + # > WINDOW_UPDATE or RST_STREAM frames can be received in this state + # > for a short period after a DATA or HEADERS frame containing a + # > END_STREAM flag is sent, as instructed in RFC 7540 Section 5.1. But we + # > don't have access to a clock so we just always allow it. + (StreamState.CLOSED, StreamInputs.RECV_WINDOW_UPDATE): + (None, StreamState.CLOSED), + (StreamState.CLOSED, StreamInputs.RECV_RST_STREAM): + (None, StreamState.CLOSED), + + # > A receiver MUST treat the receipt of a PUSH_PROMISE on a stream that is + # > neither "open" nor "half-closed (local)" as a connection error of type + # > PROTOCOL_ERROR. + (StreamState.CLOSED, StreamInputs.RECV_PUSH_PROMISE): + (H2StreamStateMachine.recv_push_on_closed_stream, StreamState.CLOSED), + + # Also, users should be forbidden from sending on closed streams. 
+ (StreamState.CLOSED, StreamInputs.SEND_HEADERS): + (H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED), + (StreamState.CLOSED, StreamInputs.SEND_PUSH_PROMISE): + (H2StreamStateMachine.send_push_on_closed_stream, StreamState.CLOSED), + (StreamState.CLOSED, StreamInputs.SEND_RST_STREAM): + (H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED), + (StreamState.CLOSED, StreamInputs.SEND_DATA): + (H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED), + (StreamState.CLOSED, StreamInputs.SEND_WINDOW_UPDATE): + (H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED), + (StreamState.CLOSED, StreamInputs.SEND_END_STREAM): + (H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED), +} + + +class H2Stream(object): + """ + A low-level HTTP/2 stream object. This handles building and receiving + frames and maintains per-stream state. + + This wraps a HTTP/2 Stream state machine implementation, ensuring that + frames can only be sent/received when the stream is in a valid state. + Attempts to create frames that cannot be sent will raise a + ``ProtocolError``. + """ + def __init__(self, + stream_id, + config, + inbound_window_size, + outbound_window_size): + self.state_machine = H2StreamStateMachine(stream_id) + self.stream_id = stream_id + self.max_outbound_frame_size = None + self.request_method = None + + # The current value of the outbound stream flow control window + self.outbound_flow_control_window = outbound_window_size + + # The flow control manager. + self._inbound_window_manager = WindowManager(inbound_window_size) + + # The expected content length, if any. + self._expected_content_length = None + + # The actual received content length. Always tracked. + self._actual_content_length = 0 + + # The authority we believe this stream belongs to. + self._authority = None + + # The configuration for this stream. + self.config = config + + def __repr__(self): + return "<%s id:%d state:%r>" % ( + type(self).__name__, + self.stream_id, + self.state_machine.state + ) + + @property + def inbound_flow_control_window(self): + """ + The size of the inbound flow control window for the stream. This is + rarely publicly useful: instead, use :meth:`remote_flow_control_window + `. This shortcut is + largely present to provide a shortcut to this data. + """ + return self._inbound_window_manager.current_window_size + + @property + def open(self): + """ + Whether the stream is 'open' in any sense: that is, whether it counts + against the number of concurrent streams. + """ + # RFC 7540 Section 5.1.2 defines 'open' for this purpose to mean either + # the OPEN state or either of the HALF_CLOSED states. Perplexingly, + # this excludes the reserved states. + # For more detail on why we're doing this in this slightly weird way, + # see the comment on ``STREAM_OPEN`` at the top of the file. + return STREAM_OPEN[self.state_machine.state] + + @property + def closed(self): + """ + Whether the stream is closed. + """ + return self.state_machine.state == StreamState.CLOSED + + @property + def closed_by(self): + """ + Returns how the stream was closed, as one of StreamClosedBy. + """ + return self.state_machine.stream_closed_by + + def upgrade(self, client_side): + """ + Called by the connection to indicate that this stream is the initial + request/response of an upgraded connection. Places the stream into an + appropriate state. 
+ """ + self.config.logger.debug("Upgrading %r", self) + + assert self.stream_id == 1 + input_ = ( + StreamInputs.UPGRADE_CLIENT if client_side + else StreamInputs.UPGRADE_SERVER + ) + + # This may return events, we deliberately don't want them. + self.state_machine.process_input(input_) + return + + def send_headers(self, headers, encoder, end_stream=False): + """ + Returns a list of HEADERS/CONTINUATION frames to emit as either headers + or trailers. + """ + self.config.logger.debug("Send headers %s on %r", headers, self) + + # Because encoding headers makes an irreversible change to the header + # compression context, we make the state transition before we encode + # them. + + # First, check if we're a client. If we are, no problem: if we aren't, + # we need to scan the header block to see if this is an informational + # response. + input_ = StreamInputs.SEND_HEADERS + if ((not self.state_machine.client) and + is_informational_response(headers)): + if end_stream: + raise ProtocolError( + "Cannot set END_STREAM on informational responses." + ) + + input_ = StreamInputs.SEND_INFORMATIONAL_HEADERS + + events = self.state_machine.process_input(input_) + + hf = HeadersFrame(self.stream_id) + hdr_validation_flags = self._build_hdr_validation_flags(events) + frames = self._build_headers_frames( + headers, encoder, hf, hdr_validation_flags + ) + + if end_stream: + # Not a bug: the END_STREAM flag is valid on the initial HEADERS + # frame, not the CONTINUATION frames that follow. + self.state_machine.process_input(StreamInputs.SEND_END_STREAM) + frames[0].flags.add('END_STREAM') + + if self.state_machine.trailers_sent and not end_stream: + raise ProtocolError("Trailers must have END_STREAM set.") + + if self.state_machine.client and self._authority is None: + self._authority = authority_from_headers(headers) + + # store request method for _initialize_content_length + self.request_method = extract_method_header(headers) + + return frames + + def push_stream_in_band(self, related_stream_id, headers, encoder): + """ + Returns a list of PUSH_PROMISE/CONTINUATION frames to emit as a pushed + stream header. Called on the stream that has the PUSH_PROMISE frame + sent on it. + """ + self.config.logger.debug("Push stream %r", self) + + # Because encoding headers makes an irreversible change to the header + # compression context, we make the state transition *first*. + + events = self.state_machine.process_input( + StreamInputs.SEND_PUSH_PROMISE + ) + + ppf = PushPromiseFrame(self.stream_id) + ppf.promised_stream_id = related_stream_id + hdr_validation_flags = self._build_hdr_validation_flags(events) + frames = self._build_headers_frames( + headers, encoder, ppf, hdr_validation_flags + ) + + return frames + + def locally_pushed(self): + """ + Mark this stream as one that was pushed by this peer. Must be called + immediately after initialization. Sends no frames, simply updates the + state machine. + """ + # This does not trigger any events. + events = self.state_machine.process_input( + StreamInputs.SEND_PUSH_PROMISE + ) + assert not events + return [] + + def send_data(self, data, end_stream=False, pad_length=None): + """ + Prepare some data frames. Optionally end the stream. + + .. warning:: Does not perform flow control checks. 
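# A simplified sketch of the caller's side of that warning: the layer above
# is assumed to clamp each chunk to the available window before calling
# send_data(); the method itself only performs the subtraction seen just
# below (padding, when used, costs a few extra bytes of window).
window = 65535                                 # illustrative outbound window
payload = b"x" * 16000
chunk = payload[:min(len(payload), window)]    # assumed caller-side clamp
window -= len(chunk)                           # mirrors the bookkeeping below
assert window >= 0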
+ """ + self.config.logger.debug( + "Send data on %r with end stream set to %s", self, end_stream + ) + + self.state_machine.process_input(StreamInputs.SEND_DATA) + + df = DataFrame(self.stream_id) + df.data = data + if end_stream: + self.state_machine.process_input(StreamInputs.SEND_END_STREAM) + df.flags.add('END_STREAM') + if pad_length is not None: + df.flags.add('PADDED') + df.pad_length = pad_length + + # Subtract flow_controlled_length to account for possible padding + self.outbound_flow_control_window -= df.flow_controlled_length + assert self.outbound_flow_control_window >= 0 + + return [df] + + def end_stream(self): + """ + End a stream without sending data. + """ + self.config.logger.debug("End stream %r", self) + + self.state_machine.process_input(StreamInputs.SEND_END_STREAM) + df = DataFrame(self.stream_id) + df.flags.add('END_STREAM') + return [df] + + def advertise_alternative_service(self, field_value): + """ + Advertise an RFC 7838 alternative service. The semantics of this are + better documented in the ``H2Connection`` class. + """ + self.config.logger.debug( + "Advertise alternative service of %r for %r", field_value, self + ) + self.state_machine.process_input(StreamInputs.SEND_ALTERNATIVE_SERVICE) + asf = AltSvcFrame(self.stream_id) + asf.field = field_value + return [asf] + + def increase_flow_control_window(self, increment): + """ + Increase the size of the flow control window for the remote side. + """ + self.config.logger.debug( + "Increase flow control window for %r by %d", + self, increment + ) + self.state_machine.process_input(StreamInputs.SEND_WINDOW_UPDATE) + self._inbound_window_manager.window_opened(increment) + + wuf = WindowUpdateFrame(self.stream_id) + wuf.window_increment = increment + return [wuf] + + def receive_push_promise_in_band(self, + promised_stream_id, + headers, + header_encoding): + """ + Receives a push promise frame sent on this stream, pushing a remote + stream. This is called on the stream that has the PUSH_PROMISE sent + on it. + """ + self.config.logger.debug( + "Receive Push Promise on %r for remote stream %d", + self, promised_stream_id + ) + events = self.state_machine.process_input( + StreamInputs.RECV_PUSH_PROMISE + ) + events[0].pushed_stream_id = promised_stream_id + + hdr_validation_flags = self._build_hdr_validation_flags(events) + events[0].headers = self._process_received_headers( + headers, hdr_validation_flags, header_encoding + ) + return [], events + + def remotely_pushed(self, pushed_headers): + """ + Mark this stream as one that was pushed by the remote peer. Must be + called immediately after initialization. Sends no frames, simply + updates the state machine. + """ + self.config.logger.debug("%r pushed by remote peer", self) + events = self.state_machine.process_input( + StreamInputs.RECV_PUSH_PROMISE + ) + self._authority = authority_from_headers(pushed_headers) + return [], events + + def receive_headers(self, headers, end_stream, header_encoding): + """ + Receive a set of headers (or trailers). 
+ """ + if is_informational_response(headers): + if end_stream: + raise ProtocolError( + "Cannot set END_STREAM on informational responses" + ) + input_ = StreamInputs.RECV_INFORMATIONAL_HEADERS + else: + input_ = StreamInputs.RECV_HEADERS + + events = self.state_machine.process_input(input_) + + if end_stream: + es_events = self.state_machine.process_input( + StreamInputs.RECV_END_STREAM + ) + events[0].stream_ended = es_events[0] + events += es_events + + self._initialize_content_length(headers) + + if isinstance(events[0], TrailersReceived): + if not end_stream: + raise ProtocolError("Trailers must have END_STREAM set") + + hdr_validation_flags = self._build_hdr_validation_flags(events) + events[0].headers = self._process_received_headers( + headers, hdr_validation_flags, header_encoding + ) + return [], events + + def receive_data(self, data, end_stream, flow_control_len): + """ + Receive some data. + """ + self.config.logger.debug( + "Receive data on %r with end stream %s and flow control length " + "set to %d", self, end_stream, flow_control_len + ) + events = self.state_machine.process_input(StreamInputs.RECV_DATA) + self._inbound_window_manager.window_consumed(flow_control_len) + self._track_content_length(len(data), end_stream) + + if end_stream: + es_events = self.state_machine.process_input( + StreamInputs.RECV_END_STREAM + ) + events[0].stream_ended = es_events[0] + events.extend(es_events) + + events[0].data = data + events[0].flow_controlled_length = flow_control_len + return [], events + + def receive_window_update(self, increment): + """ + Handle a WINDOW_UPDATE increment. + """ + self.config.logger.debug( + "Receive Window Update on %r for increment of %d", + self, increment + ) + events = self.state_machine.process_input( + StreamInputs.RECV_WINDOW_UPDATE + ) + frames = [] + + # If we encounter a problem with incrementing the flow control window, + # this should be treated as a *stream* error, not a *connection* error. + # That means we need to catch the error and forcibly close the stream. + if events: + events[0].delta = increment + try: + self.outbound_flow_control_window = guard_increment_window( + self.outbound_flow_control_window, + increment + ) + except FlowControlError: + # Ok, this is bad. We're going to need to perform a local + # reset. + event = StreamReset() + event.stream_id = self.stream_id + event.error_code = ErrorCodes.FLOW_CONTROL_ERROR + event.remote_reset = False + + events = [event] + frames = self.reset_stream(event.error_code) + + return frames, events + + def receive_continuation(self): + """ + A naked CONTINUATION frame has been received. This is always an error, + but the type of error it is depends on the state of the stream and must + transition the state of the stream, so we need to handle it. + """ + self.config.logger.debug("Receive Continuation frame on %r", self) + self.state_machine.process_input( + StreamInputs.RECV_CONTINUATION + ) + assert False, "Should not be reachable" + + def receive_alt_svc(self, frame): + """ + An Alternative Service frame was received on the stream. This frame + inherits the origin associated with this stream. + """ + self.config.logger.debug( + "Receive Alternative Service frame on stream %r", self + ) + + # If the origin is present, RFC 7838 says we have to ignore it. + if frame.origin: + return [], [] + + events = self.state_machine.process_input( + StreamInputs.RECV_ALTERNATIVE_SERVICE + ) + + # There are lots of situations where we want to ignore the ALTSVC + # frame. 
If we need to pay attention, we'll have an event and should + # fill it out. + if events: + assert isinstance(events[0], AlternativeServiceAvailable) + events[0].origin = self._authority + events[0].field_value = frame.field + + return [], events + + def reset_stream(self, error_code=0): + """ + Close the stream locally. Reset the stream with an error code. + """ + self.config.logger.debug( + "Local reset %r with error code: %d", self, error_code + ) + self.state_machine.process_input(StreamInputs.SEND_RST_STREAM) + + rsf = RstStreamFrame(self.stream_id) + rsf.error_code = error_code + return [rsf] + + def stream_reset(self, frame): + """ + Handle a stream being reset remotely. + """ + self.config.logger.debug( + "Remote reset %r with error code: %d", self, frame.error_code + ) + events = self.state_machine.process_input(StreamInputs.RECV_RST_STREAM) + + if events: + # We don't fire an event if this stream is already closed. + events[0].error_code = _error_code_from_int(frame.error_code) + + return [], events + + def acknowledge_received_data(self, acknowledged_size): + """ + The user has informed us that they've processed some amount of data + that was received on this stream. Pass that to the window manager and + potentially return some WindowUpdate frames. + """ + self.config.logger.debug( + "Acknowledge received data with size %d on %r", + acknowledged_size, self + ) + increment = self._inbound_window_manager.process_bytes( + acknowledged_size + ) + if increment: + f = WindowUpdateFrame(self.stream_id) + f.window_increment = increment + return [f] + + return [] + + def _build_hdr_validation_flags(self, events): + """ + Constructs a set of header validation flags for use when normalizing + and validating header blocks. + """ + is_trailer = isinstance( + events[0], (_TrailersSent, TrailersReceived) + ) + is_response_header = isinstance( + events[0], + ( + _ResponseSent, + ResponseReceived, + InformationalResponseReceived + ) + ) + is_push_promise = isinstance( + events[0], (PushedStreamReceived, _PushedRequestSent) + ) + + return HeaderValidationFlags( + is_client=self.state_machine.client, + is_trailer=is_trailer, + is_response_header=is_response_header, + is_push_promise=is_push_promise, + ) + + def _build_headers_frames(self, + headers, + encoder, + first_frame, + hdr_validation_flags): + """ + Helper method to build headers or push promise frames. + """ + # We need to lowercase the header names, and to ensure that secure + # header fields are kept out of compression contexts. + if self.config.normalize_outbound_headers: + headers = normalize_outbound_headers( + headers, hdr_validation_flags + ) + if self.config.validate_outbound_headers: + headers = validate_outbound_headers( + headers, hdr_validation_flags + ) + + encoded_headers = encoder.encode(headers) + + # Slice into blocks of max_outbound_frame_size. Be careful with this: + # it only works right because we never send padded frames or priority + # information on the frames. Revisit this if we do. 
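# Rough illustration of the slicing done just below, using the default
# 16,384-byte SETTINGS_MAX_FRAME_SIZE (made-up sizes, not part of the
# function itself):
encoded = b"\x00" * 40000                       # pretend HPACK output
blocks = [encoded[i:i + 16384] for i in range(0, len(encoded), 16384)]
assert len(blocks) == 3                         # 1 HEADERS + 2 CONTINUATIONs
# END_HEADERS is then set only on the last frame emitted.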
+ header_blocks = [ + encoded_headers[i:i+self.max_outbound_frame_size] + for i in range( + 0, len(encoded_headers), self.max_outbound_frame_size + ) + ] + + frames = [] + first_frame.data = header_blocks[0] + frames.append(first_frame) + + for block in header_blocks[1:]: + cf = ContinuationFrame(self.stream_id) + cf.data = block + frames.append(cf) + + frames[-1].flags.add('END_HEADERS') + return frames + + def _process_received_headers(self, + headers, + header_validation_flags, + header_encoding): + """ + When headers have been received from the remote peer, run a processing + pipeline on them to transform them into the appropriate form for + attaching to an event. + """ + if self.config.normalize_inbound_headers: + headers = normalize_inbound_headers( + headers, header_validation_flags + ) + + if self.config.validate_inbound_headers: + headers = validate_headers(headers, header_validation_flags) + + if header_encoding: + headers = _decode_headers(headers, header_encoding) + + # The above steps are all generators, so we need to concretize the + # headers now. + return list(headers) + + def _initialize_content_length(self, headers): + """ + Checks the headers for a content-length header and initializes the + _expected_content_length field from it. It's not an error for no + Content-Length header to be present. + """ + if self.request_method == b'HEAD': + self._expected_content_length = 0 + return + + for n, v in headers: + if n == b'content-length': + try: + self._expected_content_length = int(v, 10) + except ValueError: + raise ProtocolError( + "Invalid content-length header: %s" % v + ) + + return + + def _track_content_length(self, length, end_stream): + """ + Update the expected content length in response to data being received. + Validates that the appropriate amount of data is sent. Always updates + the received data, but only validates the length against the + content-length header if one was sent. + + :param length: The length of the body chunk received. + :param end_stream: If this is the last body chunk received. + """ + self._actual_content_length += length + actual = self._actual_content_length + expected = self._expected_content_length + + if expected is not None: + if expected < actual: + raise InvalidBodyLengthError(expected, actual) + + if end_stream and expected != actual: + raise InvalidBodyLengthError(expected, actual) + + def _inbound_flow_control_change_from_settings(self, delta): + """ + We changed SETTINGS_INITIAL_WINDOW_SIZE, which means we need to + update the target window size for flow control. For our flow control + strategy, this means we need to do two things: we need to adjust the + current window size, but we also need to set the target maximum window + size to the new value. + """ + new_max_size = self._inbound_window_manager.max_window_size + delta + self._inbound_window_manager.window_opened(delta) + self._inbound_window_manager.max_window_size = new_max_size + + +def _decode_headers(headers, encoding): + """ + Given an iterable of header two-tuples and an encoding, decodes those + headers using that encoding while preserving the type of the header tuple. + This ensures that the use of ``HeaderTuple`` is preserved. + """ + for header in headers: + # This function expects to work on decoded headers, which are always + # HeaderTuple objects. 
+ assert isinstance(header, HeaderTuple) + + name, value = header + name = name.decode(encoding) + value = value.decode(encoding) + yield header.__class__(name, value) diff --git a/py3/lib/python3.6/site-packages/h2/utilities.py b/py3/lib/python3.6/site-packages/h2/utilities.py new file mode 100644 index 0000000..06c916e --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2/utilities.py @@ -0,0 +1,660 @@ +# -*- coding: utf-8 -*- +""" +h2/utilities +~~~~~~~~~~~~ + +Utility functions that do not belong in a separate module. +""" +import collections +import re +from string import whitespace +import sys + +from hpack import HeaderTuple, NeverIndexedHeaderTuple + +from .exceptions import ProtocolError, FlowControlError + +UPPER_RE = re.compile(b"[A-Z]") + +# A set of headers that are hop-by-hop or connection-specific and thus +# forbidden in HTTP/2. This list comes from RFC 7540 § 8.1.2.2. +CONNECTION_HEADERS = frozenset([ + b'connection', u'connection', + b'proxy-connection', u'proxy-connection', + b'keep-alive', u'keep-alive', + b'transfer-encoding', u'transfer-encoding', + b'upgrade', u'upgrade', +]) + + +_ALLOWED_PSEUDO_HEADER_FIELDS = frozenset([ + b':method', u':method', + b':scheme', u':scheme', + b':authority', u':authority', + b':path', u':path', + b':status', u':status', + b':protocol', u':protocol', +]) + + +_SECURE_HEADERS = frozenset([ + # May have basic credentials which are vulnerable to dictionary attacks. + b'authorization', u'authorization', + b'proxy-authorization', u'proxy-authorization', +]) + + +_REQUEST_ONLY_HEADERS = frozenset([ + b':scheme', u':scheme', + b':path', u':path', + b':authority', u':authority', + b':method', u':method', + b':protocol', u':protocol', +]) + + +_RESPONSE_ONLY_HEADERS = frozenset([b':status', u':status']) + + +# A Set of pseudo headers that are only valid if the method is +# CONNECT, see RFC 8441 § 5 +_CONNECT_REQUEST_ONLY_HEADERS = frozenset([b':protocol', u':protocol']) + + +if sys.version_info[0] == 2: # Python 2.X + _WHITESPACE = frozenset(whitespace) +else: # Python 3.3+ + _WHITESPACE = frozenset(map(ord, whitespace)) + + +def _secure_headers(headers, hdr_validation_flags): + """ + Certain headers are at risk of being attacked during the header compression + phase, and so need to be kept out of header compression contexts. This + function automatically transforms certain specific headers into HPACK + never-indexed fields to ensure they don't get added to header compression + contexts. + + This function currently implements two rules: + + - 'authorization' and 'proxy-authorization' fields are automatically made + never-indexed. + - Any 'cookie' header field shorter than 20 bytes long is made + never-indexed. + + These fields are the most at-risk. These rules are inspired by Firefox + and nghttp2. + """ + for header in headers: + if header[0] in _SECURE_HEADERS: + yield NeverIndexedHeaderTuple(*header) + elif header[0] in (b'cookie', u'cookie') and len(header[1]) < 20: + yield NeverIndexedHeaderTuple(*header) + else: + yield header + + +def extract_method_header(headers): + """ + Extracts the request method from the headers list. + """ + for k, v in headers: + if k in (b':method', u':method'): + if not isinstance(v, bytes): + return v.encode('utf-8') + else: + return v + + +def is_informational_response(headers): + """ + Searches a header block for a :status header to confirm that a given + collection of headers are an informational response. 
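# A small sketch of what _secure_headers (defined above) produces: the
# sensitive field comes back as a NeverIndexedHeaderTuple so HPACK keeps it
# out of the dynamic table; the header values here are invented.
from hpack import NeverIndexedHeaderTuple

hdrs = [(b":method", b"GET"), (b"authorization", b"Basic Zm9vOmJhcg==")]
secured = list(_secure_headers(hdrs, None))
assert isinstance(secured[1], NeverIndexedHeaderTuple)
assert not isinstance(secured[0], NeverIndexedHeaderTuple)   # left untouched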
Assumes the header + block is well formed: that is, that the HTTP/2 special headers are first + in the block, and so that it can stop looking when it finds the first + header field whose name does not begin with a colon. + + :param headers: The HTTP/2 header block. + :returns: A boolean indicating if this is an informational response. + """ + for n, v in headers: + if isinstance(n, bytes): + sigil = b':' + status = b':status' + informational_start = b'1' + else: + sigil = u':' + status = u':status' + informational_start = u'1' + + # If we find a non-special header, we're done here: stop looping. + if not n.startswith(sigil): + return False + + # This isn't the status header, bail. + if n != status: + continue + + # If the first digit is a 1, we've got informational headers. + return v.startswith(informational_start) + + +def guard_increment_window(current, increment): + """ + Increments a flow control window, guarding against that window becoming too + large. + + :param current: The current value of the flow control window. + :param increment: The increment to apply to that window. + :returns: The new value of the window. + :raises: ``FlowControlError`` + """ + # The largest value the flow control window may take. + LARGEST_FLOW_CONTROL_WINDOW = 2**31 - 1 + + new_size = current + increment + + if new_size > LARGEST_FLOW_CONTROL_WINDOW: + raise FlowControlError( + "May not increment flow control window past %d" % + LARGEST_FLOW_CONTROL_WINDOW + ) + + return new_size + + +def authority_from_headers(headers): + """ + Given a header set, searches for the authority header and returns the + value. + + Note that this doesn't terminate early, so should only be called if the + headers are for a client request. Otherwise, will loop over the entire + header set, which is potentially unwise. + + :param headers: The HTTP header set. + :returns: The value of the authority header, or ``None``. + :rtype: ``bytes`` or ``None``. + """ + for n, v in headers: + # This gets run against headers that come both from HPACK and from the + # user, so we may have unicode floating around in here. We only want + # bytes. + if n in (b':authority', u':authority'): + return v.encode('utf-8') if not isinstance(v, bytes) else v + + return None + + +# Flags used by the validate_headers pipeline to determine which checks +# should be applied to a given set of headers. +HeaderValidationFlags = collections.namedtuple( + 'HeaderValidationFlags', + ['is_client', 'is_trailer', 'is_response_header', 'is_push_promise'] +) + + +def validate_headers(headers, hdr_validation_flags): + """ + Validates a header sequence against a set of constraints from RFC 7540. + + :param headers: The HTTP header set. + :param hdr_validation_flags: An instance of HeaderValidationFlags. + """ + # This validation logic is built on a sequence of generators that are + # iterated over to provide the final header list. This reduces some of the + # overhead of doing this checking. However, it's worth noting that this + # checking remains somewhat expensive, and attempts should be made wherever + # possible to reduce the time spent doing them. + # + # For example, we avoid tuple upacking in loops because it represents a + # fixed cost that we don't want to spend, instead indexing into the header + # tuples. 
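# A toy version of the pattern used below: each stage is a generator that
# re-yields headers while checking them, so nothing runs until the result
# is drained. The two stage names here are invented for illustration; the
# real stages follow immediately.
def _demo_reject_empty_names(headers, flags):
    for header in headers:
        if not header[0]:
            raise ValueError("empty header name")  # real stages use ProtocolError
        yield header

def _demo_lowercase(headers, flags):
    for header in headers:
        yield (header[0].lower(), header[1])

staged = _demo_lowercase(_demo_reject_empty_names([(b"Foo", b"bar")], None), None)
assert list(staged) == [(b"foo", b"bar")]          # checks only run here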
+ headers = _reject_uppercase_header_fields( + headers, hdr_validation_flags + ) + headers = _reject_surrounding_whitespace( + headers, hdr_validation_flags + ) + headers = _reject_te( + headers, hdr_validation_flags + ) + headers = _reject_connection_header( + headers, hdr_validation_flags + ) + headers = _reject_pseudo_header_fields( + headers, hdr_validation_flags + ) + headers = _check_host_authority_header( + headers, hdr_validation_flags + ) + headers = _check_path_header(headers, hdr_validation_flags) + + return headers + + +def _reject_uppercase_header_fields(headers, hdr_validation_flags): + """ + Raises a ProtocolError if any uppercase character is found in a header + block. + """ + for header in headers: + if UPPER_RE.search(header[0]): + raise ProtocolError( + "Received uppercase header name %s." % header[0]) + yield header + + +def _reject_surrounding_whitespace(headers, hdr_validation_flags): + """ + Raises a ProtocolError if any header name or value is surrounded by + whitespace characters. + """ + # For compatibility with RFC 7230 header fields, we need to allow the field + # value to be an empty string. This is ludicrous, but technically allowed. + # The field name may not be empty, though, so we can safely assume that it + # must have at least one character in it and throw exceptions if it + # doesn't. + for header in headers: + if header[0][0] in _WHITESPACE or header[0][-1] in _WHITESPACE: + raise ProtocolError( + "Received header name surrounded by whitespace %r" % header[0]) + if header[1] and ((header[1][0] in _WHITESPACE) or + (header[1][-1] in _WHITESPACE)): + raise ProtocolError( + "Received header value surrounded by whitespace %r" % header[1] + ) + yield header + + +def _reject_te(headers, hdr_validation_flags): + """ + Raises a ProtocolError if the TE header is present in a header block and + its value is anything other than "trailers". + """ + for header in headers: + if header[0] in (b'te', u'te'): + if header[1].lower() not in (b'trailers', u'trailers'): + raise ProtocolError( + "Invalid value for Transfer-Encoding header: %s" % + header[1] + ) + + yield header + + +def _reject_connection_header(headers, hdr_validation_flags): + """ + Raises a ProtocolError if the Connection header is present in a header + block. + """ + for header in headers: + if header[0] in CONNECTION_HEADERS: + raise ProtocolError( + "Connection-specific header field present: %s." % header[0] + ) + + yield header + + +def _custom_startswith(test_string, bytes_prefix, unicode_prefix): + """ + Given a string that might be a bytestring or a Unicode string, + return True if it starts with the appropriate prefix. + """ + if isinstance(test_string, bytes): + return test_string.startswith(bytes_prefix) + else: + return test_string.startswith(unicode_prefix) + + +def _assert_header_in_set(string_header, bytes_header, header_set): + """ + Given a set of header names, checks whether the string or byte version of + the header name is present. Raises a Protocol error with the appropriate + error if it's missing. + """ + if not (string_header in header_set or bytes_header in header_set): + raise ProtocolError( + "Header block missing mandatory %s header" % string_header + ) + + +def _reject_pseudo_header_fields(headers, hdr_validation_flags): + """ + Raises a ProtocolError if duplicate pseudo-header fields are found in a + header block or if a pseudo-header field appears in a block after an + ordinary header field. + + Raises a ProtocolError if pseudo-header fields are found in trailers. 
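# For orientation, a few invented request blocks that the generator below
# rejects once the pipeline is drained:
duplicate_pseudo = [(b":method", b"GET"), (b":method", b"GET")]
pseudo_after_regular = [(b"accept", b"*/*"), (b":path", b"/")]
unknown_pseudo = [(b":made-up", b"x")]
# Draining _reject_pseudo_header_fields() over any of these raises
# ProtocolError.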
+ """ + seen_pseudo_header_fields = set() + seen_regular_header = False + method = None + + for header in headers: + if _custom_startswith(header[0], b':', u':'): + if header[0] in seen_pseudo_header_fields: + raise ProtocolError( + "Received duplicate pseudo-header field %s" % header[0] + ) + + seen_pseudo_header_fields.add(header[0]) + + if seen_regular_header: + raise ProtocolError( + "Received pseudo-header field out of sequence: %s" % + header[0] + ) + + if header[0] not in _ALLOWED_PSEUDO_HEADER_FIELDS: + raise ProtocolError( + "Received custom pseudo-header field %s" % header[0] + ) + + if header[0] in (b':method', u':method'): + if not isinstance(header[1], bytes): + method = header[1].encode('utf-8') + else: + method = header[1] + + else: + seen_regular_header = True + + yield header + + # Check the pseudo-headers we got to confirm they're acceptable. + _check_pseudo_header_field_acceptability( + seen_pseudo_header_fields, method, hdr_validation_flags + ) + + +def _check_pseudo_header_field_acceptability(pseudo_headers, + method, + hdr_validation_flags): + """ + Given the set of pseudo-headers present in a header block and the + validation flags, confirms that RFC 7540 allows them. + """ + # Pseudo-header fields MUST NOT appear in trailers - RFC 7540 § 8.1.2.1 + if hdr_validation_flags.is_trailer and pseudo_headers: + raise ProtocolError( + "Received pseudo-header in trailer %s" % pseudo_headers + ) + + # If ':status' pseudo-header is not there in a response header, reject it. + # Similarly, if ':path', ':method', or ':scheme' are not there in a request + # header, reject it. Additionally, if a response contains any request-only + # headers or vice-versa, reject it. + # Relevant RFC section: RFC 7540 § 8.1.2.4 + # https://tools.ietf.org/html/rfc7540#section-8.1.2.4 + if hdr_validation_flags.is_response_header: + _assert_header_in_set(u':status', b':status', pseudo_headers) + invalid_response_headers = pseudo_headers & _REQUEST_ONLY_HEADERS + if invalid_response_headers: + raise ProtocolError( + "Encountered request-only headers %s" % + invalid_response_headers + ) + elif (not hdr_validation_flags.is_response_header and + not hdr_validation_flags.is_trailer): + # This is a request, so we need to have seen :path, :method, and + # :scheme. + _assert_header_in_set(u':path', b':path', pseudo_headers) + _assert_header_in_set(u':method', b':method', pseudo_headers) + _assert_header_in_set(u':scheme', b':scheme', pseudo_headers) + invalid_request_headers = pseudo_headers & _RESPONSE_ONLY_HEADERS + if invalid_request_headers: + raise ProtocolError( + "Encountered response-only headers %s" % + invalid_request_headers + ) + if method != b'CONNECT': + invalid_headers = pseudo_headers & _CONNECT_REQUEST_ONLY_HEADERS + if invalid_headers: + raise ProtocolError( + "Encountered connect-request-only headers %s" % + invalid_headers + ) + + +def _validate_host_authority_header(headers): + """ + Given the :authority and Host headers from a request block that isn't + a trailer, check that: + 1. At least one of these headers is set. + 2. If both headers are set, they match. + + :param headers: The HTTP header set. + :raises: ``ProtocolError`` + """ + # We use None as a sentinel value. Iterate over the list of headers, + # and record the value of these headers (if present). We don't need + # to worry about receiving duplicate :authority headers, as this is + # enforced by the _reject_pseudo_header_fields() pipeline. 
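    # Illustrative sketch (not from the h2 source): draining the generator
    # returned below for a block such as
    #     [(b':authority', b'example.com'), (b'host', b'example.org')]
    # raises ProtocolError, because both fields are present but their values
    # differ, while a block carrying only one of the two passes this check.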
+ # + # TODO: We should also guard against receiving duplicate Host headers, + # and against sending duplicate headers. + authority_header_val = None + host_header_val = None + + for header in headers: + if header[0] in (b':authority', u':authority'): + authority_header_val = header[1] + elif header[0] in (b'host', u'host'): + host_header_val = header[1] + + yield header + + # If we have not-None values for these variables, then we know we saw + # the corresponding header. + authority_present = (authority_header_val is not None) + host_present = (host_header_val is not None) + + # It is an error for a request header block to contain neither + # an :authority header nor a Host header. + if not authority_present and not host_present: + raise ProtocolError( + "Request header block does not have an :authority or Host header." + ) + + # If we receive both headers, they should definitely match. + if authority_present and host_present: + if authority_header_val != host_header_val: + raise ProtocolError( + "Request header block has mismatched :authority and " + "Host headers: %r / %r" + % (authority_header_val, host_header_val) + ) + + +def _check_host_authority_header(headers, hdr_validation_flags): + """ + Raises a ProtocolError if a header block arrives that does not contain an + :authority or a Host header, or if a header block contains both fields, + but their values do not match. + """ + # We only expect to see :authority and Host headers on request header + # blocks that aren't trailers, so skip this validation if this is a + # response header or we're looking at trailer blocks. + skip_validation = ( + hdr_validation_flags.is_response_header or + hdr_validation_flags.is_trailer + ) + if skip_validation: + return headers + + return _validate_host_authority_header(headers) + + +def _check_path_header(headers, hdr_validation_flags): + """ + Raise a ProtocolError if a header block arrives or is sent that contains an + empty :path header. + """ + def inner(): + for header in headers: + if header[0] in (b':path', u':path'): + if not header[1]: + raise ProtocolError("An empty :path header is forbidden") + + yield header + + # We only expect to see :authority and Host headers on request header + # blocks that aren't trailers, so skip this validation if this is a + # response header or we're looking at trailer blocks. + skip_validation = ( + hdr_validation_flags.is_response_header or + hdr_validation_flags.is_trailer + ) + if skip_validation: + return headers + else: + return inner() + + +def _lowercase_header_names(headers, hdr_validation_flags): + """ + Given an iterable of header two-tuples, rebuilds that iterable with the + header names lowercased. This generator produces tuples that preserve the + original type of the header tuple for tuple and any ``HeaderTuple``. + """ + for header in headers: + if isinstance(header, HeaderTuple): + yield header.__class__(header[0].lower(), header[1]) + else: + yield (header[0].lower(), header[1]) + + +def _strip_surrounding_whitespace(headers, hdr_validation_flags): + """ + Given an iterable of header two-tuples, strip both leading and trailing + whitespace from both header names and header values. This generator + produces tuples that preserve the original type of the header tuple for + tuple and any ``HeaderTuple``. 
+ """ + for header in headers: + if isinstance(header, HeaderTuple): + yield header.__class__(header[0].strip(), header[1].strip()) + else: + yield (header[0].strip(), header[1].strip()) + + +def _strip_connection_headers(headers, hdr_validation_flags): + """ + Strip any connection headers as per RFC7540 § 8.1.2.2. + """ + for header in headers: + if header[0] not in CONNECTION_HEADERS: + yield header + + +def _check_sent_host_authority_header(headers, hdr_validation_flags): + """ + Raises an InvalidHeaderBlockError if we try to send a header block + that does not contain an :authority or a Host header, or if + the header block contains both fields, but their values do not match. + """ + # We only expect to see :authority and Host headers on request header + # blocks that aren't trailers, so skip this validation if this is a + # response header or we're looking at trailer blocks. + skip_validation = ( + hdr_validation_flags.is_response_header or + hdr_validation_flags.is_trailer + ) + if skip_validation: + return headers + + return _validate_host_authority_header(headers) + + +def _combine_cookie_fields(headers, hdr_validation_flags): + """ + RFC 7540 § 8.1.2.5 allows HTTP/2 clients to split the Cookie header field, + which must normally appear only once, into multiple fields for better + compression. However, they MUST be joined back up again when received. + This normalization step applies that transform. The side-effect is that + all cookie fields now appear *last* in the header block. + """ + # There is a problem here about header indexing. Specifically, it's + # possible that all these cookies are sent with different header indexing + # values. At this point it shouldn't matter too much, so we apply our own + # logic and make them never-indexed. + cookies = [] + for header in headers: + if header[0] == b'cookie': + cookies.append(header[1]) + else: + yield header + if cookies: + cookie_val = b'; '.join(cookies) + yield NeverIndexedHeaderTuple(b'cookie', cookie_val) + + +def normalize_outbound_headers(headers, hdr_validation_flags): + """ + Normalizes a header sequence that we are about to send. + + :param headers: The HTTP header set. + :param hdr_validation_flags: An instance of HeaderValidationFlags. + """ + headers = _lowercase_header_names(headers, hdr_validation_flags) + headers = _strip_surrounding_whitespace(headers, hdr_validation_flags) + headers = _strip_connection_headers(headers, hdr_validation_flags) + headers = _secure_headers(headers, hdr_validation_flags) + + return headers + + +def normalize_inbound_headers(headers, hdr_validation_flags): + """ + Normalizes a header sequence that we have received. + + :param headers: The HTTP header set. + :param hdr_validation_flags: An instance of HeaderValidationFlags + """ + headers = _combine_cookie_fields(headers, hdr_validation_flags) + return headers + + +def validate_outbound_headers(headers, hdr_validation_flags): + """ + Validates and normalizes a header sequence that we are about to send. + + :param headers: The HTTP header set. + :param hdr_validation_flags: An instance of HeaderValidationFlags. 
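    A minimal composition sketch (an editor's illustration, not upstream
    text; it assumes only the pipeline functions defined in this module and
    some iterable ``headers``)::

        flags = HeaderValidationFlags(is_client=True, is_trailer=False,
                                      is_response_header=False,
                                      is_push_promise=False)
        outbound = list(validate_outbound_headers(
            normalize_outbound_headers(headers, flags), flags
        ))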
+ """ + headers = _reject_te( + headers, hdr_validation_flags + ) + headers = _reject_connection_header( + headers, hdr_validation_flags + ) + headers = _reject_pseudo_header_fields( + headers, hdr_validation_flags + ) + headers = _check_sent_host_authority_header( + headers, hdr_validation_flags + ) + headers = _check_path_header(headers, hdr_validation_flags) + + return headers + + +class SizeLimitDict(collections.OrderedDict): + + def __init__(self, *args, **kwargs): + self._size_limit = kwargs.pop("size_limit", None) + super(SizeLimitDict, self).__init__(*args, **kwargs) + + self._check_size_limit() + + def __setitem__(self, key, value): + super(SizeLimitDict, self).__setitem__(key, value) + + self._check_size_limit() + + def _check_size_limit(self): + if self._size_limit is not None: + while len(self) > self._size_limit: + self.popitem(last=False) diff --git a/py3/lib/python3.6/site-packages/h2/windows.py b/py3/lib/python3.6/site-packages/h2/windows.py new file mode 100644 index 0000000..6656975 --- /dev/null +++ b/py3/lib/python3.6/site-packages/h2/windows.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +""" +h2/windows +~~~~~~~~~~ + +Defines tools for managing HTTP/2 flow control windows. + +The objects defined in this module are used to automatically manage HTTP/2 +flow control windows. Specifically, they keep track of what the size of the +window is, how much data has been consumed from that window, and how much data +the user has already used. It then implements a basic algorithm that attempts +to manage the flow control window without user input, trying to ensure that it +does not emit too many WINDOW_UPDATE frames. +""" +from __future__ import division + +from .exceptions import FlowControlError + + +# The largest acceptable value for a HTTP/2 flow control window. +LARGEST_FLOW_CONTROL_WINDOW = 2**31 - 1 + + +class WindowManager(object): + """ + A basic HTTP/2 window manager. + + :param max_window_size: The maximum size of the flow control window. + :type max_window_size: ``int`` + """ + def __init__(self, max_window_size): + assert max_window_size <= LARGEST_FLOW_CONTROL_WINDOW + self.max_window_size = max_window_size + self.current_window_size = max_window_size + self._bytes_processed = 0 + + def window_consumed(self, size): + """ + We have received a certain number of bytes from the remote peer. This + necessarily shrinks the flow control window! + + :param size: The number of flow controlled bytes we received from the + remote peer. + :type size: ``int`` + :returns: Nothing. + :rtype: ``None`` + """ + self.current_window_size -= size + if self.current_window_size < 0: + raise FlowControlError("Flow control window shrunk below 0") + + def window_opened(self, size): + """ + The flow control window has been incremented, either because of manual + flow control management or because of the user changing the flow + control settings. This can have the effect of increasing what we + consider to be the "maximum" flow control window size. + + This does not increase our view of how many bytes have been processed, + only of how much space is in the window. + + :param size: The increment to the flow control window we received. 
+ :type size: ``int`` + :returns: Nothing + :rtype: ``None`` + """ + self.current_window_size += size + + if self.current_window_size > LARGEST_FLOW_CONTROL_WINDOW: + raise FlowControlError( + "Flow control window mustn't exceed %d" % + LARGEST_FLOW_CONTROL_WINDOW + ) + + if self.current_window_size > self.max_window_size: + self.max_window_size = self.current_window_size + + def process_bytes(self, size): + """ + The application has informed us that it has processed a certain number + of bytes. This may cause us to want to emit a window update frame. If + we do want to emit a window update frame, this method will return the + number of bytes that we should increment the window by. + + :param size: The number of flow controlled bytes that the application + has processed. + :type size: ``int`` + :returns: The number of bytes to increment the flow control window by, + or ``None``. + :rtype: ``int`` or ``None`` + """ + self._bytes_processed += size + return self._maybe_update_window() + + def _maybe_update_window(self): + """ + Run the algorithm. + + Our current algorithm can be described like this. + + 1. If no bytes have been processed, we immediately return 0. There is + no meaningful way for us to hand space in the window back to the + remote peer, so let's not even try. + 2. If there is no space in the flow control window, and we have + processed at least 1024 bytes (or 1/4 of the window, if the window + is smaller), we will emit a window update frame. This is to avoid + the risk of blocking a stream altogether. + 3. If there is space in the flow control window, and we have processed + at least 1/2 of the window worth of bytes, we will emit a window + update frame. This is to minimise the number of window update frames + we have to emit. + + In a healthy system with large flow control windows, this will + irregularly emit WINDOW_UPDATE frames. This prevents us starving the + connection by emitting eleventy bajillion WINDOW_UPDATE frames, + especially in situations where the remote peer is sending a lot of very + small DATA frames. + """ + # TODO: Can the window be smaller than 1024 bytes? If not, we can + # streamline this algorithm. + if not self._bytes_processed: + return None + + max_increment = (self.max_window_size - self.current_window_size) + increment = 0 + + # Note that, even though we may increment less than _bytes_processed, + # we still want to set it to zero whenever we emit an increment. This + # is because we'll always increment up to the maximum we can. + if (self.current_window_size == 0) and ( + self._bytes_processed > min(1024, self.max_window_size // 4)): + increment = min(self._bytes_processed, max_increment) + self._bytes_processed = 0 + elif self._bytes_processed >= (self.max_window_size // 2): + increment = min(self._bytes_processed, max_increment) + self._bytes_processed = 0 + + self.current_window_size += increment + return increment diff --git a/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/DESCRIPTION.rst b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..c7e95d8 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,179 @@ +======================================== +hpack: HTTP/2 Header Encoding for Python +======================================== + +.. image:: https://raw.github.com/Lukasa/hyper/development/docs/source/images/hyper.png + +.. 
image:: https://travis-ci.org/python-hyper/hpack.png?branch=master + :target: https://travis-ci.org/python-hyper/hpack + +This module contains a pure-Python HTTP/2 header encoding (HPACK) logic for use +in Python programs that implement HTTP/2. It also contains a compatibility +layer that automatically enables the use of ``nghttp2`` if it's available. + +Documentation +============= + +Documentation is available at http://python-hyper.org/hpack/. + +Contributing +============ + +``hpack`` welcomes contributions from anyone! Unlike many other projects we are +happy to accept cosmetic contributions and small contributions, in addition to +large feature requests and changes. + +Before you contribute (either by opening an issue or filing a pull request), +please `read the contribution guidelines`_. + +.. _read the contribution guidelines: http://hyper.readthedocs.org/en/development/contributing.html + +License +======= + +``hpack`` is made available under the MIT License. For more details, see the +``LICENSE`` file in the repository. + +Authors +======= + +``hpack`` is maintained by Cory Benfield, with contributions from others. For +more details about the contributors, please see ``CONTRIBUTORS.rst``. + + +Release History +=============== + +3.0.0 (2017-03-29) +------------------ + +**API Changes (Backward Incompatible)** + +- Removed nghttp2 support. This support had rotted and was essentially + non-functional, so it has now been removed until someone has time to re-add + the support in a functional form. +- Attempts by the encoder to exceed the maximum allowed header table size via + dynamic table size updates (or the absence thereof) are now forbidden. + +**API Changes (Backward Compatible)** + +- Added a new ``InvalidTableSizeError`` thrown when the encoder does not + respect the maximum table size set by the user. +- Added a ``Decoder.max_allowed_table_size`` field that sets the maximum + allowed size of the decoder header table. See the documentation for an + indication of how this should be used. + +**Bugfixes** + +- Up to 25% performance improvement decoding HPACK-packed integers, depending + on the platform. +- HPACK now tolerates receiving multiple header table size changes in sequence, + rather than only one. +- HPACK now forbids header table size changes anywhere but first in a header + block, as required by RFC 7541 § 4.2. +- Other miscellaneous performance improvements. + +2.3.0 (2016-08-04) +------------------ + +**Security Fixes** + +- CVE-2016-6581: HPACK Bomb. This release now enforces a maximum value of the + decompressed size of the header list. This is to avoid the so-called "HPACK + Bomb" vulnerability, which is caused when a malicious peer sends a compressed + HPACK body that decompresses to a gigantic header list size. + + This also adds a ``OversizedHeaderListError``, which is thrown by the + ``decode`` method if the maximum header list size is being violated. This + places the HPACK decoder into a broken state: it must not be used after this + exception is thrown. + + This also adds a ``max_header_list_size`` to the ``Decoder`` object. This + controls the maximum allowable decompressed size of the header list. By + default this is set to 64kB. + +2.2.0 (2016-04-20) +------------------ + +**API Changes (Backward Compatible)** + +- Added ``HeaderTuple`` and ``NeverIndexedHeaderTuple`` classes that signal + whether a given header field may ever be indexed in HTTP/2 header + compression. 
+- Changed ``Decoder.decode()`` to return the newly added ``HeaderTuple`` class + and subclass. These objects behave like two-tuples, so this change does not + break working code. + +**Bugfixes** + +- Improve Huffman decoding speed by 4x using an approach borrowed from nghttp2. +- Improve HPACK decoding speed by 10% by caching header table sizes. + +2.1.1 (2016-03-16) +------------------ + +**Bugfixes** + +- When passing a dictionary or dictionary subclass to ``Encoder.encode``, HPACK + now ensures that HTTP/2 special headers (headers whose names begin with + ``:`` characters) appear first in the header block. + +2.1.0 (2016-02-02) +------------------ + +**API Changes (Backward Compatible)** + +- Added new ``InvalidTableIndex`` exception, a subclass of + ``HPACKDecodingError``. +- Instead of throwing ``IndexError`` when encountering invalid encoded integers + HPACK now throws ``HPACKDecodingError``. +- Instead of throwing ``UnicodeDecodeError`` when encountering headers that are + not UTF-8 encoded, HPACK now throws ``HPACKDecodingError``. +- Instead of throwing ``IndexError`` when encountering invalid table offsets, + HPACK now throws ``InvalidTableIndex``. +- Added ``raw`` flag to ``decode``, allowing ``decode`` to return bytes instead + of attempting to decode the headers as UTF-8. + +**Bugfixes** + +- ``memoryview`` objects are now used when decoding HPACK, improving the + performance by avoiding unnecessary data copies. + +2.0.1 (2015-11-09) +------------------ + +- Fixed a bug where the Python HPACK implementation would only emit header + table size changes for the total change between one header block and another, + rather than for the entire sequence of changes. + +2.0.0 (2015-10-12) +------------------ + +- Remove unused ``HPACKEncodingError``. +- Add the shortcut ability to import the public API (``Encoder``, ``Decoder``, + ``HPACKError``, ``HPACKDecodingError``) directly, rather than from + ``hpack.hpack``. + +1.1.0 (2015-07-07) +------------------ + +- Add support for emitting 'never indexed' header fields, by using an optional + third element in the header tuple. With thanks to @jimcarreer! + +1.0.1 (2015-04-19) +------------------ + +- Header fields that have names matching header table entries are now added to + the header table. This improves compression efficiency at the cost of + slightly more table operations. With thanks to `Tatsuhiro Tsujikawa`_. + +.. _Tatsuhiro Tsujikawa: https://github.com/tatsuhiro-t + +1.0.0 (2015-04-13) +------------------ + +- Initial fork of the code from `hyper`_. + +.. 
_hyper: https://hyper.readthedocs.org/ + + diff --git a/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/METADATA b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/METADATA new file mode 100644 index 0000000..8c818b7 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/METADATA @@ -0,0 +1,201 @@ +Metadata-Version: 2.0 +Name: hpack +Version: 3.0.0 +Summary: Pure-Python HPACK header compression +Home-page: http://hyper.rtfd.org +Author: Cory Benfield +Author-email: cory@lukasa.co.uk +License: MIT License +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: CPython + +======================================== +hpack: HTTP/2 Header Encoding for Python +======================================== + +.. image:: https://raw.github.com/Lukasa/hyper/development/docs/source/images/hyper.png + +.. image:: https://travis-ci.org/python-hyper/hpack.png?branch=master + :target: https://travis-ci.org/python-hyper/hpack + +This module contains a pure-Python HTTP/2 header encoding (HPACK) logic for use +in Python programs that implement HTTP/2. It also contains a compatibility +layer that automatically enables the use of ``nghttp2`` if it's available. + +Documentation +============= + +Documentation is available at http://python-hyper.org/hpack/. + +Contributing +============ + +``hpack`` welcomes contributions from anyone! Unlike many other projects we are +happy to accept cosmetic contributions and small contributions, in addition to +large feature requests and changes. + +Before you contribute (either by opening an issue or filing a pull request), +please `read the contribution guidelines`_. + +.. _read the contribution guidelines: http://hyper.readthedocs.org/en/development/contributing.html + +License +======= + +``hpack`` is made available under the MIT License. For more details, see the +``LICENSE`` file in the repository. + +Authors +======= + +``hpack`` is maintained by Cory Benfield, with contributions from others. For +more details about the contributors, please see ``CONTRIBUTORS.rst``. + + +Release History +=============== + +3.0.0 (2017-03-29) +------------------ + +**API Changes (Backward Incompatible)** + +- Removed nghttp2 support. This support had rotted and was essentially + non-functional, so it has now been removed until someone has time to re-add + the support in a functional form. +- Attempts by the encoder to exceed the maximum allowed header table size via + dynamic table size updates (or the absence thereof) are now forbidden. + +**API Changes (Backward Compatible)** + +- Added a new ``InvalidTableSizeError`` thrown when the encoder does not + respect the maximum table size set by the user. 
+- Added a ``Decoder.max_allowed_table_size`` field that sets the maximum + allowed size of the decoder header table. See the documentation for an + indication of how this should be used. + +**Bugfixes** + +- Up to 25% performance improvement decoding HPACK-packed integers, depending + on the platform. +- HPACK now tolerates receiving multiple header table size changes in sequence, + rather than only one. +- HPACK now forbids header table size changes anywhere but first in a header + block, as required by RFC 7541 § 4.2. +- Other miscellaneous performance improvements. + +2.3.0 (2016-08-04) +------------------ + +**Security Fixes** + +- CVE-2016-6581: HPACK Bomb. This release now enforces a maximum value of the + decompressed size of the header list. This is to avoid the so-called "HPACK + Bomb" vulnerability, which is caused when a malicious peer sends a compressed + HPACK body that decompresses to a gigantic header list size. + + This also adds a ``OversizedHeaderListError``, which is thrown by the + ``decode`` method if the maximum header list size is being violated. This + places the HPACK decoder into a broken state: it must not be used after this + exception is thrown. + + This also adds a ``max_header_list_size`` to the ``Decoder`` object. This + controls the maximum allowable decompressed size of the header list. By + default this is set to 64kB. + +2.2.0 (2016-04-20) +------------------ + +**API Changes (Backward Compatible)** + +- Added ``HeaderTuple`` and ``NeverIndexedHeaderTuple`` classes that signal + whether a given header field may ever be indexed in HTTP/2 header + compression. +- Changed ``Decoder.decode()`` to return the newly added ``HeaderTuple`` class + and subclass. These objects behave like two-tuples, so this change does not + break working code. + +**Bugfixes** + +- Improve Huffman decoding speed by 4x using an approach borrowed from nghttp2. +- Improve HPACK decoding speed by 10% by caching header table sizes. + +2.1.1 (2016-03-16) +------------------ + +**Bugfixes** + +- When passing a dictionary or dictionary subclass to ``Encoder.encode``, HPACK + now ensures that HTTP/2 special headers (headers whose names begin with + ``:`` characters) appear first in the header block. + +2.1.0 (2016-02-02) +------------------ + +**API Changes (Backward Compatible)** + +- Added new ``InvalidTableIndex`` exception, a subclass of + ``HPACKDecodingError``. +- Instead of throwing ``IndexError`` when encountering invalid encoded integers + HPACK now throws ``HPACKDecodingError``. +- Instead of throwing ``UnicodeDecodeError`` when encountering headers that are + not UTF-8 encoded, HPACK now throws ``HPACKDecodingError``. +- Instead of throwing ``IndexError`` when encountering invalid table offsets, + HPACK now throws ``InvalidTableIndex``. +- Added ``raw`` flag to ``decode``, allowing ``decode`` to return bytes instead + of attempting to decode the headers as UTF-8. + +**Bugfixes** + +- ``memoryview`` objects are now used when decoding HPACK, improving the + performance by avoiding unnecessary data copies. + +2.0.1 (2015-11-09) +------------------ + +- Fixed a bug where the Python HPACK implementation would only emit header + table size changes for the total change between one header block and another, + rather than for the entire sequence of changes. + +2.0.0 (2015-10-12) +------------------ + +- Remove unused ``HPACKEncodingError``. 
+- Add the shortcut ability to import the public API (``Encoder``, ``Decoder``, + ``HPACKError``, ``HPACKDecodingError``) directly, rather than from + ``hpack.hpack``. + +1.1.0 (2015-07-07) +------------------ + +- Add support for emitting 'never indexed' header fields, by using an optional + third element in the header tuple. With thanks to @jimcarreer! + +1.0.1 (2015-04-19) +------------------ + +- Header fields that have names matching header table entries are now added to + the header table. This improves compression efficiency at the cost of + slightly more table operations. With thanks to `Tatsuhiro Tsujikawa`_. + +.. _Tatsuhiro Tsujikawa: https://github.com/tatsuhiro-t + +1.0.0 (2015-04-13) +------------------ + +- Initial fork of the code from `hyper`_. + +.. _hyper: https://hyper.readthedocs.org/ + + diff --git a/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/RECORD b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/RECORD new file mode 100644 index 0000000..1495f90 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/RECORD @@ -0,0 +1,27 @@ +hpack/__init__.py,sha256=Pt27T8qdtME8DpY-_rI0I_oBO670OCt_hymhJlBlc0Y,479 +hpack/compat.py,sha256=gIyA_FOrzA3wKPXpLqJcKU-iffSwGXdJzuQUN3AN5v4,679 +hpack/exceptions.py,sha256=Rrw1Fke5Cfq72OwvwrnS6JucBvw3_bxu3o_ujtAUlrM,974 +hpack/hpack.py,sha256=NTATROnUHErtssiSR08bMgV62ehc7uNvFPnKrJi5NGw,22772 +hpack/hpack_compat.py,sha256=fS9caWAND4k_g8ciXqDpvJunczssnvYAAFIHmGr8qzc,3596 +hpack/huffman.py,sha256=nOOZTGpOXGkwbrWPkYaHNt00RVeyQeN-Lf2SKsi96x4,2521 +hpack/huffman_constants.py,sha256=4szhXrK1EEXNMKZzbSUgrG8S07Vc3ALwj7QpwLTtNh0,4628 +hpack/huffman_table.py,sha256=5yijIuaylIePwkbhWMXKf3EXspeXKY7hakpvPdSRJkg,168580 +hpack/struct.py,sha256=rMfEqhtp6KTOZ0pk33iXTFVfuhVwEUO-XZjrsQMDZ5I,1052 +hpack/table.py,sha256=6cITVnI5X-x4wxu8uA0dp9kkMck8kDFAOUupDGy0Su8,8950 +hpack-3.0.0.dist-info/DESCRIPTION.rst,sha256=a3KmUrtMvB6_oOCsF-WO51EgKWNAu5JwhHb-cO2YQr8,6161 +hpack-3.0.0.dist-info/METADATA,sha256=v0XmuzSPr5PaSa65ecn0yq_jDNz83rTBgcS3CKcaEAo,6995 +hpack-3.0.0.dist-info/RECORD,, +hpack-3.0.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +hpack-3.0.0.dist-info/metadata.json,sha256=cHIa1A8fQfSGOPOb9YqdHEYzRVN1yqSmczK-UyefhJk,929 +hpack-3.0.0.dist-info/top_level.txt,sha256=nyrZLbQo-0nC6ot3YO_109pkUiTtK1M0wUJfLHuQceE,6 +hpack-3.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +hpack/__pycache__/huffman_constants.cpython-36.pyc,, +hpack/__pycache__/huffman.cpython-36.pyc,, +hpack/__pycache__/hpack.cpython-36.pyc,, +hpack/__pycache__/hpack_compat.cpython-36.pyc,, +hpack/__pycache__/compat.cpython-36.pyc,, +hpack/__pycache__/table.cpython-36.pyc,, +hpack/__pycache__/huffman_table.cpython-36.pyc,, +hpack/__pycache__/__init__.cpython-36.pyc,, +hpack/__pycache__/struct.cpython-36.pyc,, +hpack/__pycache__/exceptions.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/WHEEL b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/metadata.json b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/metadata.json new file mode 100644 index 0000000..022bcb6 --- /dev/null +++ 
b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: CPython"], "extensions": {"python.details": {"contacts": [{"email": "cory@lukasa.co.uk", "name": "Cory Benfield", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://hyper.rtfd.org"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT License", "metadata_version": "2.0", "name": "hpack", "summary": "Pure-Python HPACK header compression", "version": "3.0.0"} \ No newline at end of file diff --git a/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/top_level.txt new file mode 100644 index 0000000..1a0ac48 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack-3.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +hpack diff --git a/py3/lib/python3.6/site-packages/hpack/__init__.py b/py3/lib/python3.6/site-packages/hpack/__init__.py new file mode 100644 index 0000000..22edde2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +""" +hpack +~~~~~ + +HTTP/2 header encoding for Python. +""" +from .hpack import Encoder, Decoder +from .struct import HeaderTuple, NeverIndexedHeaderTuple +from .exceptions import ( + HPACKError, HPACKDecodingError, InvalidTableIndex, OversizedHeaderListError +) + +__all__ = [ + 'Encoder', 'Decoder', 'HPACKError', 'HPACKDecodingError', + 'InvalidTableIndex', 'HeaderTuple', 'NeverIndexedHeaderTuple', + 'OversizedHeaderListError' +] + +__version__ = '3.0.0' diff --git a/py3/lib/python3.6/site-packages/hpack/compat.py b/py3/lib/python3.6/site-packages/hpack/compat.py new file mode 100644 index 0000000..4fcaad4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack/compat.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +""" +hpack/compat +~~~~~~~~~~~~ + +Normalizes the Python 2/3 API for internal use. +""" +import sys + + +_ver = sys.version_info +is_py2 = _ver[0] == 2 +is_py3 = _ver[0] == 3 + +if is_py2: + def to_byte(char): + return ord(char) + + def decode_hex(b): + return b.decode('hex') + + def to_bytes(b): + if isinstance(b, memoryview): + return b.tobytes() + else: + return bytes(b) + + unicode = unicode # noqa + bytes = str + +elif is_py3: + def to_byte(char): + return char + + def decode_hex(b): + return bytes.fromhex(b) + + def to_bytes(b): + return bytes(b) + + unicode = str + bytes = bytes diff --git a/py3/lib/python3.6/site-packages/hpack/exceptions.py b/py3/lib/python3.6/site-packages/hpack/exceptions.py new file mode 100644 index 0000000..571ba98 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack/exceptions.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +""" +hyper/http20/exceptions +~~~~~~~~~~~~~~~~~~~~~~~ + +This defines exceptions used in the HTTP/2 portion of hyper. +""" + + +class HPACKError(Exception): + """ + The base class for all ``hpack`` exceptions. + """ + pass + + +class HPACKDecodingError(HPACKError): + """ + An error has been encountered while performing HPACK decoding. 
+ """ + pass + + +class InvalidTableIndex(HPACKDecodingError): + """ + An invalid table index was received. + """ + pass + + +class OversizedHeaderListError(HPACKDecodingError): + """ + A header list that was larger than we allow has been received. This may be + a DoS attack. + + .. versionadded:: 2.3.0 + """ + pass + + +class InvalidTableSizeError(HPACKDecodingError): + """ + An attempt was made to change the decoder table size to a value larger than + allowed, or the list was shrunk and the remote peer didn't shrink their + table size. + + .. versionadded:: 3.0.0 + """ + pass diff --git a/py3/lib/python3.6/site-packages/hpack/hpack.py b/py3/lib/python3.6/site-packages/hpack/hpack.py new file mode 100644 index 0000000..57e8c7f --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack/hpack.py @@ -0,0 +1,630 @@ +# -*- coding: utf-8 -*- +""" +hpack/hpack +~~~~~~~~~~~ + +Implements the HPACK header compression algorithm as detailed by the IETF. +""" +import logging + +from .table import HeaderTable, table_entry_size +from .compat import to_byte, to_bytes +from .exceptions import ( + HPACKDecodingError, OversizedHeaderListError, InvalidTableSizeError +) +from .huffman import HuffmanEncoder +from .huffman_constants import ( + REQUEST_CODES, REQUEST_CODES_LENGTH +) +from .huffman_table import decode_huffman +from .struct import HeaderTuple, NeverIndexedHeaderTuple + +log = logging.getLogger(__name__) + +INDEX_NONE = b'\x00' +INDEX_NEVER = b'\x10' +INDEX_INCREMENTAL = b'\x40' + +# Precompute 2^i for 1-8 for use in prefix calcs. +# Zero index is not used but there to save a subtraction +# as prefix numbers are not zero indexed. +_PREFIX_BIT_MAX_NUMBERS = [(2 ** i) - 1 for i in range(9)] + +try: # pragma: no cover + basestring = basestring +except NameError: # pragma: no cover + basestring = (str, bytes) + + +# We default the maximum header list we're willing to accept to 64kB. That's a +# lot of headers, but if applications want to raise it they can do. +DEFAULT_MAX_HEADER_LIST_SIZE = 2 ** 16 + + +def _unicode_if_needed(header, raw): + """ + Provides a header as a unicode string if raw is False, otherwise returns + it as a bytestring. + """ + name = to_bytes(header[0]) + value = to_bytes(header[1]) + if not raw: + name = name.decode('utf-8') + value = value.decode('utf-8') + return header.__class__(name, value) + + +def encode_integer(integer, prefix_bits): + """ + This encodes an integer according to the wacky integer encoding rules + defined in the HPACK spec. + """ + log.debug("Encoding %d with %d bits", integer, prefix_bits) + + if integer < 0: + raise ValueError( + "Can only encode positive integers, got %s" % integer + ) + + if prefix_bits < 1 or prefix_bits > 8: + raise ValueError( + "Prefix bits must be between 1 and 8, got %s" % prefix_bits + ) + + max_number = _PREFIX_BIT_MAX_NUMBERS[prefix_bits] + + if integer < max_number: + return bytearray([integer]) # Seriously? + else: + elements = [max_number] + integer -= max_number + + while integer >= 128: + elements.append((integer & 127) + 128) + integer >>= 7 + + elements.append(integer) + + return bytearray(elements) + + +def decode_integer(data, prefix_bits): + """ + This decodes an integer according to the wacky integer encoding rules + defined in the HPACK spec. Returns a tuple of the decoded integer and the + number of bytes that were consumed from ``data`` in order to get that + integer. 
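    As a worked example (added here for illustration, with values taken from
    RFC 7541 Section 5.1): the integer 1337 under a 5-bit prefix is encoded
    as the three octets ``1f 9a 0a``, so::

        decode_integer(b'\x1f\x9a\x0a', 5)   # returns (1337, 3)
        encode_integer(1337, 5)              # returns bytearray(b'\x1f\x9a\x0a')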
+ """ + if prefix_bits < 1 or prefix_bits > 8: + raise ValueError( + "Prefix bits must be between 1 and 8, got %s" % prefix_bits + ) + + max_number = _PREFIX_BIT_MAX_NUMBERS[prefix_bits] + index = 1 + shift = 0 + mask = (0xFF >> (8 - prefix_bits)) + + try: + number = to_byte(data[0]) & mask + if number == max_number: + while True: + next_byte = to_byte(data[index]) + index += 1 + + if next_byte >= 128: + number += (next_byte - 128) << shift + else: + number += next_byte << shift + break + shift += 7 + + except IndexError: + raise HPACKDecodingError( + "Unable to decode HPACK integer representation from %r" % data + ) + + log.debug("Decoded %d, consumed %d bytes", number, index) + + return number, index + + +def _dict_to_iterable(header_dict): + """ + This converts a dictionary to an iterable of two-tuples. This is a + HPACK-specific function becuase it pulls "special-headers" out first and + then emits them. + """ + assert isinstance(header_dict, dict) + keys = sorted( + header_dict.keys(), + key=lambda k: not _to_bytes(k).startswith(b':') + ) + for key in keys: + yield key, header_dict[key] + + +def _to_bytes(string): + """ + Convert string to bytes. + """ + if not isinstance(string, basestring): # pragma: no cover + string = str(string) + + return string if isinstance(string, bytes) else string.encode('utf-8') + + +class Encoder(object): + """ + An HPACK encoder object. This object takes HTTP headers and emits encoded + HTTP/2 header blocks. + """ + + def __init__(self): + self.header_table = HeaderTable() + self.huffman_coder = HuffmanEncoder( + REQUEST_CODES, REQUEST_CODES_LENGTH + ) + self.table_size_changes = [] + + @property + def header_table_size(self): + """ + Controls the size of the HPACK header table. + """ + return self.header_table.maxsize + + @header_table_size.setter + def header_table_size(self, value): + self.header_table.maxsize = value + if self.header_table.resized: + self.table_size_changes.append(value) + + def encode(self, headers, huffman=True): + """ + Takes a set of headers and encodes them into a HPACK-encoded header + block. + + :param headers: The headers to encode. Must be either an iterable of + tuples, an iterable of :class:`HeaderTuple + `, or a ``dict``. + + If an iterable of tuples, the tuples may be either + two-tuples or three-tuples. If they are two-tuples, the + tuples must be of the format ``(name, value)``. If they + are three-tuples, they must be of the format + ``(name, value, sensitive)``, where ``sensitive`` is a + boolean value indicating whether the header should be + added to header tables anywhere. If not present, + ``sensitive`` defaults to ``False``. + + If an iterable of :class:`HeaderTuple + `, the tuples must always be + two-tuples. Instead of using ``sensitive`` as a third + tuple entry, use :class:`NeverIndexedHeaderTuple + ` to request that + the field never be indexed. + + .. warning:: HTTP/2 requires that all special headers + (headers whose names begin with ``:`` characters) + appear at the *start* of the header block. While + this method will ensure that happens for ``dict`` + subclasses, callers using any other iterable of + tuples **must** ensure they place their special + headers at the start of the iterable. + + For efficiency reasons users should prefer to use + iterables of two-tuples: fixing the ordering of + dictionary headers is an expensive operation that + should be avoided if possible. + + :param huffman: (optional) Whether to Huffman-encode any header sent as + a literal value. 
Except for use when debugging, it is + recommended that this be left enabled. + + :returns: A bytestring containing the HPACK-encoded header block. + """ + # Transforming the headers into a header block is a procedure that can + # be modeled as a chain or pipe. First, the headers are encoded. This + # encoding can be done a number of ways. If the header name-value pair + # are already in the header table we can represent them using the + # indexed representation: the same is true if they are in the static + # table. Otherwise, a literal representation will be used. + log.debug("HPACK encoding %s", headers) + header_block = [] + + # Turn the headers into a list of tuples if possible. This is the + # natural way to interact with them in HPACK. Because dictionaries are + # un-ordered, we need to make sure we grab the "special" headers first. + if isinstance(headers, dict): + headers = _dict_to_iterable(headers) + + # Before we begin, if the header table size has been changed we need + # to signal all changes since last emission appropriately. + if self.header_table.resized: + header_block.append(self._encode_table_size_change()) + self.header_table.resized = False + + # Add each header to the header block + for header in headers: + sensitive = False + if isinstance(header, HeaderTuple): + sensitive = not header.indexable + elif len(header) > 2: + sensitive = header[2] + + header = (_to_bytes(header[0]), _to_bytes(header[1])) + header_block.append(self.add(header, sensitive, huffman)) + + header_block = b''.join(header_block) + + log.debug("Encoded header block to %s", header_block) + + return header_block + + def add(self, to_add, sensitive, huffman=False): + """ + This function takes a header key-value tuple and serializes it. + """ + log.debug("Adding %s to the header table", to_add) + + name, value = to_add + + # Set our indexing mode + indexbit = INDEX_INCREMENTAL if not sensitive else INDEX_NEVER + + # Search for a matching header in the header table. + match = self.header_table.search(name, value) + + if match is None: + # Not in the header table. Encode using the literal syntax, + # and add it to the header table. + encoded = self._encode_literal(name, value, indexbit, huffman) + if not sensitive: + self.header_table.add(name, value) + return encoded + + # The header is in the table, break out the values. If we matched + # perfectly, we can use the indexed representation: otherwise we + # can use the indexed literal. + index, name, perfect = match + + if perfect: + # Indexed representation. + encoded = self._encode_indexed(index) + else: + # Indexed literal. We are going to add header to the + # header table unconditionally. It is a future todo to + # filter out headers which are known to be ineffective for + # indexing since they just take space in the table and + # pushed out other valuable headers. + encoded = self._encode_indexed_literal( + index, value, indexbit, huffman + ) + if not sensitive: + self.header_table.add(name, value) + + return encoded + + def _encode_indexed(self, index): + """ + Encodes a header using the indexed representation. + """ + field = encode_integer(index, 7) + field[0] |= 0x80 # we set the top bit + return bytes(field) + + def _encode_literal(self, name, value, indexbit, huffman=False): + """ + Encodes a header with a literal name and literal value. If ``indexing`` + is True, the header will be added to the header table: otherwise it + will not. 
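        As an illustrative example (not from the upstream docstring): with
        incremental indexing and Huffman coding disabled, the pair
        ``(b'x', b'y')`` serializes to five octets, the ``0x40`` indexing
        prefix followed by each string as a one-byte length plus its literal
        bytes::

            self._encode_literal(b'x', b'y', INDEX_INCREMENTAL, huffman=False)
            # -> b'\x40\x01x\x01y'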
+ """ + if huffman: + name = self.huffman_coder.encode(name) + value = self.huffman_coder.encode(value) + + name_len = encode_integer(len(name), 7) + value_len = encode_integer(len(value), 7) + + if huffman: + name_len[0] |= 0x80 + value_len[0] |= 0x80 + + return b''.join( + [indexbit, bytes(name_len), name, bytes(value_len), value] + ) + + def _encode_indexed_literal(self, index, value, indexbit, huffman=False): + """ + Encodes a header with an indexed name and a literal value and performs + incremental indexing. + """ + if indexbit != INDEX_INCREMENTAL: + prefix = encode_integer(index, 4) + else: + prefix = encode_integer(index, 6) + + prefix[0] |= ord(indexbit) + + if huffman: + value = self.huffman_coder.encode(value) + + value_len = encode_integer(len(value), 7) + + if huffman: + value_len[0] |= 0x80 + + return b''.join([bytes(prefix), bytes(value_len), value]) + + def _encode_table_size_change(self): + """ + Produces the encoded form of all header table size change context + updates. + """ + block = b'' + for size_bytes in self.table_size_changes: + size_bytes = encode_integer(size_bytes, 5) + size_bytes[0] |= 0x20 + block += bytes(size_bytes) + self.table_size_changes = [] + return block + + +class Decoder(object): + """ + An HPACK decoder object. + + .. versionchanged:: 2.3.0 + Added ``max_header_list_size`` argument. + + :param max_header_list_size: The maximum decompressed size we will allow + for any single header block. This is a protection against DoS attacks + that attempt to force the application to expand a relatively small + amount of data into a really large header list, allowing enormous + amounts of memory to be allocated. + + If this amount of data is exceeded, a `OversizedHeaderListError + ` exception will be raised. At this + point the connection should be shut down, as the HPACK state will no + longer be useable. + + Defaults to 64kB. + :type max_header_list_size: ``int`` + """ + def __init__(self, max_header_list_size=DEFAULT_MAX_HEADER_LIST_SIZE): + self.header_table = HeaderTable() + + #: The maximum decompressed size we will allow for any single header + #: block. This is a protection against DoS attacks that attempt to + #: force the application to expand a relatively small amount of data + #: into a really large header list, allowing enormous amounts of memory + #: to be allocated. + #: + #: If this amount of data is exceeded, a `OversizedHeaderListError + #: ` exception will be raised. At this + #: point the connection should be shut down, as the HPACK state will no + #: longer be usable. + #: + #: Defaults to 64kB. + #: + #: .. versionadded:: 2.3.0 + self.max_header_list_size = max_header_list_size + + #: Maximum allowed header table size. + #: + #: A HTTP/2 implementation should set this to the most recent value of + #: SETTINGS_HEADER_TABLE_SIZE that it sent *and has received an ACK + #: for*. Once this setting is set, the actual header table size will be + #: checked at the end of each decoding run and whenever it is changed, + #: to confirm that it fits in this size. + self.max_allowed_table_size = self.header_table.maxsize + + @property + def header_table_size(self): + """ + Controls the size of the HPACK header table. + """ + return self.header_table.maxsize + + @header_table_size.setter + def header_table_size(self, value): + self.header_table.maxsize = value + + def decode(self, data, raw=False): + """ + Takes an HPACK-encoded header block and decodes it into a header set. 
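        An illustrative sketch (not from the upstream docstring): the one-byte
        block ``b'\x82'`` is an indexed reference to static table entry 2
        (RFC 7541 Appendix A), so::

            Decoder().decode(b'\x82')            # [(':method', 'GET')]
            Decoder().decode(b'\x82', raw=True)  # [(b':method', b'GET')]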
+ + :param data: A bytestring representing a complete HPACK-encoded header + block. + :param raw: (optional) Whether to return the headers as tuples of raw + byte strings or to decode them as UTF-8 before returning + them. The default value is False, which returns tuples of + Unicode strings + :returns: A list of two-tuples of ``(name, value)`` representing the + HPACK-encoded headers, in the order they were decoded. + :raises HPACKDecodingError: If an error is encountered while decoding + the header block. + """ + log.debug("Decoding %s", data) + + data_mem = memoryview(data) + headers = [] + data_len = len(data) + inflated_size = 0 + current_index = 0 + + while current_index < data_len: + # Work out what kind of header we're decoding. + # If the high bit is 1, it's an indexed field. + current = to_byte(data[current_index]) + indexed = True if current & 0x80 else False + + # Otherwise, if the second-highest bit is 1 it's a field that does + # alter the header table. + literal_index = True if current & 0x40 else False + + # Otherwise, if the third-highest bit is 1 it's an encoding context + # update. + encoding_update = True if current & 0x20 else False + + if indexed: + header, consumed = self._decode_indexed( + data_mem[current_index:] + ) + elif literal_index: + # It's a literal header that does affect the header table. + header, consumed = self._decode_literal_index( + data_mem[current_index:] + ) + elif encoding_update: + # It's an update to the encoding context. These are forbidden + # in a header block after any actual header. + if headers: + raise HPACKDecodingError( + "Table size update not at the start of the block" + ) + consumed = self._update_encoding_context( + data_mem[current_index:] + ) + header = None + else: + # It's a literal header that does not affect the header table. + header, consumed = self._decode_literal_no_index( + data_mem[current_index:] + ) + + if header: + headers.append(header) + inflated_size += table_entry_size(*header) + + if inflated_size > self.max_header_list_size: + raise OversizedHeaderListError( + "A header list larger than %d has been received" % + self.max_header_list_size + ) + + current_index += consumed + + # Confirm that the table size is lower than the maximum. We do this + # here to ensure that we catch when the max has been *shrunk* and the + # remote peer hasn't actually done that. + self._assert_valid_table_size() + + try: + return [_unicode_if_needed(h, raw) for h in headers] + except UnicodeDecodeError: + raise HPACKDecodingError("Unable to decode headers as UTF-8.") + + def _assert_valid_table_size(self): + """ + Check that the table size set by the encoder is lower than the maximum + we expect to have. + """ + if self.header_table_size > self.max_allowed_table_size: + raise InvalidTableSizeError( + "Encoder did not shrink table size to within the max" + ) + + def _update_encoding_context(self, data): + """ + Handles a byte that updates the encoding context. + """ + # We've been asked to resize the header table. + new_size, consumed = decode_integer(data, 5) + if new_size > self.max_allowed_table_size: + raise InvalidTableSizeError( + "Encoder exceeded max allowable table size" + ) + self.header_table_size = new_size + return consumed + + def _decode_indexed(self, data): + """ + Decodes a header represented using the indexed representation. 
+ """ + index, consumed = decode_integer(data, 7) + header = HeaderTuple(*self.header_table.get_by_index(index)) + log.debug("Decoded %s, consumed %d", header, consumed) + return header, consumed + + def _decode_literal_no_index(self, data): + return self._decode_literal(data, False) + + def _decode_literal_index(self, data): + return self._decode_literal(data, True) + + def _decode_literal(self, data, should_index): + """ + Decodes a header represented with a literal. + """ + total_consumed = 0 + + # When should_index is true, if the low six bits of the first byte are + # nonzero, the header name is indexed. + # When should_index is false, if the low four bits of the first byte + # are nonzero the header name is indexed. + if should_index: + indexed_name = to_byte(data[0]) & 0x3F + name_len = 6 + not_indexable = False + else: + high_byte = to_byte(data[0]) + indexed_name = high_byte & 0x0F + name_len = 4 + not_indexable = high_byte & 0x10 + + if indexed_name: + # Indexed header name. + index, consumed = decode_integer(data, name_len) + name = self.header_table.get_by_index(index)[0] + + total_consumed = consumed + length = 0 + else: + # Literal header name. The first byte was consumed, so we need to + # move forward. + data = data[1:] + + length, consumed = decode_integer(data, 7) + name = data[consumed:consumed + length] + if len(name) != length: + raise HPACKDecodingError("Truncated header block") + + if to_byte(data[0]) & 0x80: + name = decode_huffman(name) + total_consumed = consumed + length + 1 # Since we moved forward 1. + + data = data[consumed + length:] + + # The header value is definitely length-based. + length, consumed = decode_integer(data, 7) + value = data[consumed:consumed + length] + if len(value) != length: + raise HPACKDecodingError("Truncated header block") + + if to_byte(data[0]) & 0x80: + value = decode_huffman(value) + + # Updated the total consumed length. + total_consumed += length + consumed + + # If we have been told never to index the header field, encode that in + # the tuple we use. + if not_indexable: + header = NeverIndexedHeaderTuple(name, value) + else: + header = HeaderTuple(name, value) + + # If we've been asked to index this, add it to the header table. + if should_index: + self.header_table.add(name, value) + + log.debug( + "Decoded %s, total consumed %d bytes, indexed %s", + header, + total_consumed, + should_index + ) + + return header, total_consumed diff --git a/py3/lib/python3.6/site-packages/hpack/hpack_compat.py b/py3/lib/python3.6/site-packages/hpack/hpack_compat.py new file mode 100644 index 0000000..e5a76d7 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack/hpack_compat.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +""" +hpack/hpack_compat +~~~~~~~~~~~~~~~~~~ + +Provides an abstraction layer over two HPACK implementations. + +This module has a pure-Python greenfield HPACK implementation that can be used +on all Python platforms. However, this implementation is both slower and more +memory-hungry than could be achieved with a C-language version. Additionally, +nghttp2's HPACK implementation currently achieves better compression ratios +than hyper's in almost all benchmarks. + +For those who care about efficiency and speed in HPACK, this module allows you +to use nghttp2's HPACK implementation instead of ours. This module detects +whether the nghttp2 bindings are installed, and if they are it wraps them in +a hpack-compatible API and uses them instead of its own. If not, it falls back +to the built-in Python bindings. 
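Either way, the module exposes the same two entry points, so a minimal round
trip (an illustrative sketch; the header values are made up) looks like::

    from hpack.hpack_compat import Encoder, Decoder

    block = Encoder().encode([(b':method', b'GET'), (b'x-demo', b'1')])
    headers = Decoder().decode(block)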
+""" +import logging +from .hpack import _to_bytes + +log = logging.getLogger(__name__) + +# Attempt to import nghttp2. +try: + import nghttp2 + USE_NGHTTP2 = True + log.debug("Using nghttp2's HPACK implementation.") +except ImportError: + USE_NGHTTP2 = False + log.debug("Using our pure-Python HPACK implementation.") + +if USE_NGHTTP2: # noqa + class Encoder(object): + """ + An HPACK encoder object. This object takes HTTP headers and emits + encoded HTTP/2 header blocks. + """ + def __init__(self): + self._e = nghttp2.HDDeflater() + + @property + def header_table_size(self): + """ + Returns the header table size. For the moment this isn't + useful, so we don't use it. + """ + raise NotImplementedError() + + @header_table_size.setter + def header_table_size(self, value): + log.debug("Setting header table size to %d", value) + self._e.change_table_size(value) + + def encode(self, headers, huffman=True): + """ + Encode the headers. The huffman parameter has no effect, it is + simply present for compatibility. + """ + log.debug("HPACK encoding %s", headers) + + # Turn the headers into a list of tuples if possible. This is the + # natural way to interact with them in HPACK. + if isinstance(headers, dict): + headers = headers.items() + + # Next, walk across the headers and turn them all into bytestrings. + headers = [(_to_bytes(n), _to_bytes(v)) for n, v in headers] + + # Now, let nghttp2 do its thing. + header_block = self._e.deflate(headers) + + return header_block + + class Decoder(object): + """ + An HPACK decoder object. + """ + def __init__(self): + self._d = nghttp2.HDInflater() + + @property + def header_table_size(self): + """ + Returns the header table size. For the moment this isn't + useful, so we don't use it. + """ + raise NotImplementedError() + + @header_table_size.setter + def header_table_size(self, value): + log.debug("Setting header table size to %d", value) + self._d.change_table_size(value) + + def decode(self, data): + """ + Takes an HPACK-encoded header block and decodes it into a header + set. + """ + log.debug("Decoding %s", data) + + headers = self._d.inflate(data) + return [(n.decode('utf-8'), v.decode('utf-8')) for n, v in headers] +else: + # Grab the built-in encoder and decoder. + from .hpack import Encoder, Decoder # noqa diff --git a/py3/lib/python3.6/site-packages/hpack/huffman.py b/py3/lib/python3.6/site-packages/hpack/huffman.py new file mode 100644 index 0000000..159569c --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack/huffman.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +""" +hpack/huffman_decoder +~~~~~~~~~~~~~~~~~~~~~ + +An implementation of a bitwise prefix tree specially built for decoding +Huffman-coded content where we already know the Huffman table. +""" +from .compat import to_byte, decode_hex + + +class HuffmanEncoder(object): + """ + Encodes a string according to the Huffman encoding table defined in the + HPACK specification. + """ + def __init__(self, huffman_code_list, huffman_code_list_lengths): + self.huffman_code_list = huffman_code_list + self.huffman_code_list_lengths = huffman_code_list_lengths + + def encode(self, bytes_to_encode): + """ + Given a string of bytes, encodes them according to the HPACK Huffman + specification. + """ + # If handed the empty string, just immediately return. + if not bytes_to_encode: + return b'' + + final_num = 0 + final_int_len = 0 + + # Turn each byte into its huffman code. These codes aren't necessarily + # octet aligned, so keep track of how far through an octet we are. 
To + # handle this cleanly, just use a single giant integer. + for char in bytes_to_encode: + byte = to_byte(char) + bin_int_len = self.huffman_code_list_lengths[byte] + bin_int = self.huffman_code_list[byte] & ( + 2 ** (bin_int_len + 1) - 1 + ) + final_num <<= bin_int_len + final_num |= bin_int + final_int_len += bin_int_len + + # Pad out to an octet with ones. + bits_to_be_padded = (8 - (final_int_len % 8)) % 8 + final_num <<= bits_to_be_padded + final_num |= (1 << bits_to_be_padded) - 1 + + # Convert the number to hex and strip off the leading '0x' and the + # trailing 'L', if present. + final_num = hex(final_num)[2:].rstrip('L') + + # If this is odd, prepend a zero. + final_num = '0' + final_num if len(final_num) % 2 != 0 else final_num + + # This number should have twice as many digits as bytes. If not, we're + # missing some leading zeroes. Work out how many bytes we want and how + # many digits we have, then add the missing zero digits to the front. + total_bytes = (final_int_len + bits_to_be_padded) // 8 + expected_digits = total_bytes * 2 + + if len(final_num) != expected_digits: + missing_digits = expected_digits - len(final_num) + final_num = ('0' * missing_digits) + final_num + + return decode_hex(final_num) diff --git a/py3/lib/python3.6/site-packages/hpack/huffman_constants.py b/py3/lib/python3.6/site-packages/hpack/huffman_constants.py new file mode 100644 index 0000000..c2b3bb2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack/huffman_constants.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +""" +hpack/huffman_constants +~~~~~~~~~~~~~~~~~~~~~~~ + +Defines the constant Huffman table. This takes up an upsetting amount of space, +but c'est la vie. +""" + +REQUEST_CODES = [ + 0x1ff8, + 0x7fffd8, + 0xfffffe2, + 0xfffffe3, + 0xfffffe4, + 0xfffffe5, + 0xfffffe6, + 0xfffffe7, + 0xfffffe8, + 0xffffea, + 0x3ffffffc, + 0xfffffe9, + 0xfffffea, + 0x3ffffffd, + 0xfffffeb, + 0xfffffec, + 0xfffffed, + 0xfffffee, + 0xfffffef, + 0xffffff0, + 0xffffff1, + 0xffffff2, + 0x3ffffffe, + 0xffffff3, + 0xffffff4, + 0xffffff5, + 0xffffff6, + 0xffffff7, + 0xffffff8, + 0xffffff9, + 0xffffffa, + 0xffffffb, + 0x14, + 0x3f8, + 0x3f9, + 0xffa, + 0x1ff9, + 0x15, + 0xf8, + 0x7fa, + 0x3fa, + 0x3fb, + 0xf9, + 0x7fb, + 0xfa, + 0x16, + 0x17, + 0x18, + 0x0, + 0x1, + 0x2, + 0x19, + 0x1a, + 0x1b, + 0x1c, + 0x1d, + 0x1e, + 0x1f, + 0x5c, + 0xfb, + 0x7ffc, + 0x20, + 0xffb, + 0x3fc, + 0x1ffa, + 0x21, + 0x5d, + 0x5e, + 0x5f, + 0x60, + 0x61, + 0x62, + 0x63, + 0x64, + 0x65, + 0x66, + 0x67, + 0x68, + 0x69, + 0x6a, + 0x6b, + 0x6c, + 0x6d, + 0x6e, + 0x6f, + 0x70, + 0x71, + 0x72, + 0xfc, + 0x73, + 0xfd, + 0x1ffb, + 0x7fff0, + 0x1ffc, + 0x3ffc, + 0x22, + 0x7ffd, + 0x3, + 0x23, + 0x4, + 0x24, + 0x5, + 0x25, + 0x26, + 0x27, + 0x6, + 0x74, + 0x75, + 0x28, + 0x29, + 0x2a, + 0x7, + 0x2b, + 0x76, + 0x2c, + 0x8, + 0x9, + 0x2d, + 0x77, + 0x78, + 0x79, + 0x7a, + 0x7b, + 0x7ffe, + 0x7fc, + 0x3ffd, + 0x1ffd, + 0xffffffc, + 0xfffe6, + 0x3fffd2, + 0xfffe7, + 0xfffe8, + 0x3fffd3, + 0x3fffd4, + 0x3fffd5, + 0x7fffd9, + 0x3fffd6, + 0x7fffda, + 0x7fffdb, + 0x7fffdc, + 0x7fffdd, + 0x7fffde, + 0xffffeb, + 0x7fffdf, + 0xffffec, + 0xffffed, + 0x3fffd7, + 0x7fffe0, + 0xffffee, + 0x7fffe1, + 0x7fffe2, + 0x7fffe3, + 0x7fffe4, + 0x1fffdc, + 0x3fffd8, + 0x7fffe5, + 0x3fffd9, + 0x7fffe6, + 0x7fffe7, + 0xffffef, + 0x3fffda, + 0x1fffdd, + 0xfffe9, + 0x3fffdb, + 0x3fffdc, + 0x7fffe8, + 0x7fffe9, + 0x1fffde, + 0x7fffea, + 0x3fffdd, + 0x3fffde, + 0xfffff0, + 0x1fffdf, + 0x3fffdf, + 0x7fffeb, + 0x7fffec, + 0x1fffe0, + 0x1fffe1, + 0x3fffe0, + 0x1fffe2, + 
0x7fffed, + 0x3fffe1, + 0x7fffee, + 0x7fffef, + 0xfffea, + 0x3fffe2, + 0x3fffe3, + 0x3fffe4, + 0x7ffff0, + 0x3fffe5, + 0x3fffe6, + 0x7ffff1, + 0x3ffffe0, + 0x3ffffe1, + 0xfffeb, + 0x7fff1, + 0x3fffe7, + 0x7ffff2, + 0x3fffe8, + 0x1ffffec, + 0x3ffffe2, + 0x3ffffe3, + 0x3ffffe4, + 0x7ffffde, + 0x7ffffdf, + 0x3ffffe5, + 0xfffff1, + 0x1ffffed, + 0x7fff2, + 0x1fffe3, + 0x3ffffe6, + 0x7ffffe0, + 0x7ffffe1, + 0x3ffffe7, + 0x7ffffe2, + 0xfffff2, + 0x1fffe4, + 0x1fffe5, + 0x3ffffe8, + 0x3ffffe9, + 0xffffffd, + 0x7ffffe3, + 0x7ffffe4, + 0x7ffffe5, + 0xfffec, + 0xfffff3, + 0xfffed, + 0x1fffe6, + 0x3fffe9, + 0x1fffe7, + 0x1fffe8, + 0x7ffff3, + 0x3fffea, + 0x3fffeb, + 0x1ffffee, + 0x1ffffef, + 0xfffff4, + 0xfffff5, + 0x3ffffea, + 0x7ffff4, + 0x3ffffeb, + 0x7ffffe6, + 0x3ffffec, + 0x3ffffed, + 0x7ffffe7, + 0x7ffffe8, + 0x7ffffe9, + 0x7ffffea, + 0x7ffffeb, + 0xffffffe, + 0x7ffffec, + 0x7ffffed, + 0x7ffffee, + 0x7ffffef, + 0x7fffff0, + 0x3ffffee, + 0x3fffffff, +] + +REQUEST_CODES_LENGTH = [ + 13, 23, 28, 28, 28, 28, 28, 28, 28, 24, 30, 28, 28, 30, 28, 28, + 28, 28, 28, 28, 28, 28, 30, 28, 28, 28, 28, 28, 28, 28, 28, 28, + 6, 10, 10, 12, 13, 6, 8, 11, 10, 10, 8, 11, 8, 6, 6, 6, + 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 7, 8, 15, 6, 12, 10, + 13, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, + 7, 7, 7, 7, 7, 7, 7, 7, 8, 7, 8, 13, 19, 13, 14, 6, + 15, 5, 6, 5, 6, 5, 6, 6, 6, 5, 7, 7, 6, 6, 6, 5, + 6, 7, 6, 5, 5, 6, 7, 7, 7, 7, 7, 15, 11, 14, 13, 28, + 20, 22, 20, 20, 22, 22, 22, 23, 22, 23, 23, 23, 23, 23, 24, 23, + 24, 24, 22, 23, 24, 23, 23, 23, 23, 21, 22, 23, 22, 23, 23, 24, + 22, 21, 20, 22, 22, 23, 23, 21, 23, 22, 22, 24, 21, 22, 23, 23, + 21, 21, 22, 21, 23, 22, 23, 23, 20, 22, 22, 22, 23, 22, 22, 23, + 26, 26, 20, 19, 22, 23, 22, 25, 26, 26, 26, 27, 27, 26, 24, 25, + 19, 21, 26, 27, 27, 26, 27, 24, 21, 21, 26, 26, 28, 27, 27, 27, + 20, 24, 20, 21, 22, 21, 21, 23, 22, 22, 25, 25, 24, 24, 26, 23, + 26, 27, 26, 26, 27, 27, 27, 27, 27, 28, 27, 27, 27, 27, 27, 26, + 30, +] diff --git a/py3/lib/python3.6/site-packages/hpack/huffman_table.py b/py3/lib/python3.6/site-packages/hpack/huffman_table.py new file mode 100644 index 0000000..c199ef5 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack/huffman_table.py @@ -0,0 +1,4739 @@ +# -*- coding: utf-8 -*- +""" +hpack/huffman_table +~~~~~~~~~~~~~~~~~~~ + +This implementation of a Huffman decoding table for HTTP/2 is essentially a +Python port of the work originally done for nghttp2's Huffman decoding. For +this reason, while this file is made available under the MIT license as is the +rest of this module, this file is undoubtedly a derivative work of the nghttp2 +file ``nghttp2_hd_huffman_data.c``, obtained from +https://github.com/tatsuhiro-t/nghttp2/ at commit +d2b55ad1a245e1d1964579fa3fac36ebf3939e72. That work is made available under +the Apache 2.0 license under the following terms: + + Copyright (c) 2013 Tatsuhiro Tsujikawa + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +The essence of this approach is that it builds a finite state machine out of +4-bit nibbles of Huffman coded data. The input function passes 4 bits worth of +data to the state machine each time, which uses those 4 bits of data along with +the current accumulated state data to process the data given. + +For the sake of efficiency, the in-memory representation of the states, +transitions, and result values of the state machine are represented as a long +list containing three-tuples. This list is enormously long, and viewing it as +an in-memory representation is not very clear, but it is laid out here in a way +that is intended to be *somewhat* more clear. + +Essentially, the list is structured as 256 collections of 16 entries (one for +each nibble) of three-tuples. Each collection is called a "node", and the +zeroth collection is called the "root node". The state machine tracks one +value: the "state" byte. + +For each nibble passed to the state machine, it first multiplies the "state" +byte by 16 and adds the numerical value of the nibble. This number is the index +into the large flat list. + +The three-tuple that is found by looking up that index consists of three +values: + +- a new state value, used for subsequent decoding +- a collection of flags, used to determine whether data is emitted or whether + the state machine is complete. +- the byte value to emit, assuming that emitting a byte is required. + +The flags are consulted, if necessary a byte is emitted, and then the next +nibble is used. This continues until the state machine believes it has +completely Huffman-decoded the data. + +This approach has relatively little indirection, and therefore performs +relatively well, particularly on implementations like PyPy where the cost of +loops at the Python-level is not too expensive. The total number of loop +iterations is 4x the number of bytes passed to the decoder. +""" +from .exceptions import HPACKDecodingError + + +# This defines the state machine "class" at the top of the file. The reason we +# do this is to keep the terrifing monster state table at the *bottom* of the +# file so you don't have to actually *look* at the damn thing. +def decode_huffman(huffman_string): + """ + Given a bytestring of Huffman-encoded data for HPACK, returns a bytestring + of the decompressed data. + """ + if not huffman_string: + return b'' + + state = 0 + flags = 0 + decoded_bytes = bytearray() + + # Perversely, bytearrays are a lot more convenient across Python 2 and + # Python 3 because they behave *the same way* on both platforms. Given that + # we really do want numerical bytes when we iterate here, let's use a + # bytearray. + huffman_string = bytearray(huffman_string) + + # This loop is unrolled somewhat. Because we use a nibble, not a byte, we + # need to handle each nibble twice. We unroll that: it makes the loop body + # a bit longer, but that's ok. 
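+    #
+    # A small worked example of the table walk: decoding b'\x07' (the 5-bit
+    # code for '0' padded with ones). The high nibble 0x0 from state 0 looks
+    # up HUFFMAN_TABLE[0 * 16 + 0] == (4, 0, 0), so the state becomes 4 and
+    # nothing is emitted. The low nibble 0x7 then looks up
+    # HUFFMAN_TABLE[4 * 16 + 7] == (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 48),
+    # which emits byte 48 (b'0') and leaves the decoder in a complete state,
+    # so decode_huffman(b'\x07') returns b'0'.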
+ for input_byte in huffman_string: + index = (state * 16) + (input_byte >> 4) + state, flags, output_byte = HUFFMAN_TABLE[index] + + if flags & HUFFMAN_FAIL: + raise HPACKDecodingError("Invalid Huffman String") + + if flags & HUFFMAN_EMIT_SYMBOL: + decoded_bytes.append(output_byte) + + index = (state * 16) + (input_byte & 0x0F) + state, flags, output_byte = HUFFMAN_TABLE[index] + + if flags & HUFFMAN_FAIL: + raise HPACKDecodingError("Invalid Huffman String") + + if flags & HUFFMAN_EMIT_SYMBOL: + decoded_bytes.append(output_byte) + + if not (flags & HUFFMAN_COMPLETE): + raise HPACKDecodingError("Incomplete Huffman string") + + return bytes(decoded_bytes) + + +# Some decoder flags to control state transitions. +HUFFMAN_COMPLETE = 1 +HUFFMAN_EMIT_SYMBOL = (1 << 1) +HUFFMAN_FAIL = (1 << 2) + +# This is the monster table. Avert your eyes, children. +HUFFMAN_TABLE = [ + # Node 0 (Root Node, never emits symbols.) + (4, 0, 0), + (5, 0, 0), + (7, 0, 0), + (8, 0, 0), + (11, 0, 0), + (12, 0, 0), + (16, 0, 0), + (19, 0, 0), + (25, 0, 0), + (28, 0, 0), + (32, 0, 0), + (35, 0, 0), + (42, 0, 0), + (49, 0, 0), + (57, 0, 0), + (64, HUFFMAN_COMPLETE, 0), + + # Node 1 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 48), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 49), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 50), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 97), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 99), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 101), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 105), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 111), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 115), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 116), + (13, 0, 0), + (14, 0, 0), + (17, 0, 0), + (18, 0, 0), + (20, 0, 0), + (21, 0, 0), + + # Node 2 + (1, HUFFMAN_EMIT_SYMBOL, 48), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 48), + (1, HUFFMAN_EMIT_SYMBOL, 49), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 49), + (1, HUFFMAN_EMIT_SYMBOL, 50), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 50), + (1, HUFFMAN_EMIT_SYMBOL, 97), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 97), + (1, HUFFMAN_EMIT_SYMBOL, 99), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 99), + (1, HUFFMAN_EMIT_SYMBOL, 101), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 101), + (1, HUFFMAN_EMIT_SYMBOL, 105), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 105), + (1, HUFFMAN_EMIT_SYMBOL, 111), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 111), + + # Node 3 + (2, HUFFMAN_EMIT_SYMBOL, 48), + (9, HUFFMAN_EMIT_SYMBOL, 48), + (23, HUFFMAN_EMIT_SYMBOL, 48), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 48), + (2, HUFFMAN_EMIT_SYMBOL, 49), + (9, HUFFMAN_EMIT_SYMBOL, 49), + (23, HUFFMAN_EMIT_SYMBOL, 49), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 49), + (2, HUFFMAN_EMIT_SYMBOL, 50), + (9, HUFFMAN_EMIT_SYMBOL, 50), + (23, HUFFMAN_EMIT_SYMBOL, 50), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 50), + (2, HUFFMAN_EMIT_SYMBOL, 97), + (9, HUFFMAN_EMIT_SYMBOL, 97), + (23, HUFFMAN_EMIT_SYMBOL, 97), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 97), + + # Node 4 + (3, HUFFMAN_EMIT_SYMBOL, 48), + (6, HUFFMAN_EMIT_SYMBOL, 48), + (10, HUFFMAN_EMIT_SYMBOL, 48), + (15, HUFFMAN_EMIT_SYMBOL, 48), + (24, HUFFMAN_EMIT_SYMBOL, 48), + (31, HUFFMAN_EMIT_SYMBOL, 48), + (41, HUFFMAN_EMIT_SYMBOL, 48), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 48), + (3, HUFFMAN_EMIT_SYMBOL, 49), + (6, HUFFMAN_EMIT_SYMBOL, 49), + (10, HUFFMAN_EMIT_SYMBOL, 49), + (15, HUFFMAN_EMIT_SYMBOL, 49), + (24, HUFFMAN_EMIT_SYMBOL, 49), + (31, HUFFMAN_EMIT_SYMBOL, 
49), + (41, HUFFMAN_EMIT_SYMBOL, 49), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 49), + + # Node 5 + (3, HUFFMAN_EMIT_SYMBOL, 50), + (6, HUFFMAN_EMIT_SYMBOL, 50), + (10, HUFFMAN_EMIT_SYMBOL, 50), + (15, HUFFMAN_EMIT_SYMBOL, 50), + (24, HUFFMAN_EMIT_SYMBOL, 50), + (31, HUFFMAN_EMIT_SYMBOL, 50), + (41, HUFFMAN_EMIT_SYMBOL, 50), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 50), + (3, HUFFMAN_EMIT_SYMBOL, 97), + (6, HUFFMAN_EMIT_SYMBOL, 97), + (10, HUFFMAN_EMIT_SYMBOL, 97), + (15, HUFFMAN_EMIT_SYMBOL, 97), + (24, HUFFMAN_EMIT_SYMBOL, 97), + (31, HUFFMAN_EMIT_SYMBOL, 97), + (41, HUFFMAN_EMIT_SYMBOL, 97), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 97), + + # Node 6 + (2, HUFFMAN_EMIT_SYMBOL, 99), + (9, HUFFMAN_EMIT_SYMBOL, 99), + (23, HUFFMAN_EMIT_SYMBOL, 99), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 99), + (2, HUFFMAN_EMIT_SYMBOL, 101), + (9, HUFFMAN_EMIT_SYMBOL, 101), + (23, HUFFMAN_EMIT_SYMBOL, 101), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 101), + (2, HUFFMAN_EMIT_SYMBOL, 105), + (9, HUFFMAN_EMIT_SYMBOL, 105), + (23, HUFFMAN_EMIT_SYMBOL, 105), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 105), + (2, HUFFMAN_EMIT_SYMBOL, 111), + (9, HUFFMAN_EMIT_SYMBOL, 111), + (23, HUFFMAN_EMIT_SYMBOL, 111), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 111), + + # Node 7 + (3, HUFFMAN_EMIT_SYMBOL, 99), + (6, HUFFMAN_EMIT_SYMBOL, 99), + (10, HUFFMAN_EMIT_SYMBOL, 99), + (15, HUFFMAN_EMIT_SYMBOL, 99), + (24, HUFFMAN_EMIT_SYMBOL, 99), + (31, HUFFMAN_EMIT_SYMBOL, 99), + (41, HUFFMAN_EMIT_SYMBOL, 99), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 99), + (3, HUFFMAN_EMIT_SYMBOL, 101), + (6, HUFFMAN_EMIT_SYMBOL, 101), + (10, HUFFMAN_EMIT_SYMBOL, 101), + (15, HUFFMAN_EMIT_SYMBOL, 101), + (24, HUFFMAN_EMIT_SYMBOL, 101), + (31, HUFFMAN_EMIT_SYMBOL, 101), + (41, HUFFMAN_EMIT_SYMBOL, 101), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 101), + + # Node 8 + (3, HUFFMAN_EMIT_SYMBOL, 105), + (6, HUFFMAN_EMIT_SYMBOL, 105), + (10, HUFFMAN_EMIT_SYMBOL, 105), + (15, HUFFMAN_EMIT_SYMBOL, 105), + (24, HUFFMAN_EMIT_SYMBOL, 105), + (31, HUFFMAN_EMIT_SYMBOL, 105), + (41, HUFFMAN_EMIT_SYMBOL, 105), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 105), + (3, HUFFMAN_EMIT_SYMBOL, 111), + (6, HUFFMAN_EMIT_SYMBOL, 111), + (10, HUFFMAN_EMIT_SYMBOL, 111), + (15, HUFFMAN_EMIT_SYMBOL, 111), + (24, HUFFMAN_EMIT_SYMBOL, 111), + (31, HUFFMAN_EMIT_SYMBOL, 111), + (41, HUFFMAN_EMIT_SYMBOL, 111), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 111), + + # Node 9 + (1, HUFFMAN_EMIT_SYMBOL, 115), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 115), + (1, HUFFMAN_EMIT_SYMBOL, 116), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 116), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 32), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 37), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 45), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 46), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 47), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 51), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 52), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 53), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 54), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 55), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 56), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 57), + + # Node 10 + (2, HUFFMAN_EMIT_SYMBOL, 115), + (9, HUFFMAN_EMIT_SYMBOL, 115), + (23, HUFFMAN_EMIT_SYMBOL, 115), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 115), + (2, HUFFMAN_EMIT_SYMBOL, 116), + (9, HUFFMAN_EMIT_SYMBOL, 116), + (23, HUFFMAN_EMIT_SYMBOL, 116), + (40, HUFFMAN_COMPLETE | 
HUFFMAN_EMIT_SYMBOL, 116), + (1, HUFFMAN_EMIT_SYMBOL, 32), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 32), + (1, HUFFMAN_EMIT_SYMBOL, 37), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 37), + (1, HUFFMAN_EMIT_SYMBOL, 45), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 45), + (1, HUFFMAN_EMIT_SYMBOL, 46), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 46), + + # Node 11 + (3, HUFFMAN_EMIT_SYMBOL, 115), + (6, HUFFMAN_EMIT_SYMBOL, 115), + (10, HUFFMAN_EMIT_SYMBOL, 115), + (15, HUFFMAN_EMIT_SYMBOL, 115), + (24, HUFFMAN_EMIT_SYMBOL, 115), + (31, HUFFMAN_EMIT_SYMBOL, 115), + (41, HUFFMAN_EMIT_SYMBOL, 115), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 115), + (3, HUFFMAN_EMIT_SYMBOL, 116), + (6, HUFFMAN_EMIT_SYMBOL, 116), + (10, HUFFMAN_EMIT_SYMBOL, 116), + (15, HUFFMAN_EMIT_SYMBOL, 116), + (24, HUFFMAN_EMIT_SYMBOL, 116), + (31, HUFFMAN_EMIT_SYMBOL, 116), + (41, HUFFMAN_EMIT_SYMBOL, 116), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 116), + + # Node 12 + (2, HUFFMAN_EMIT_SYMBOL, 32), + (9, HUFFMAN_EMIT_SYMBOL, 32), + (23, HUFFMAN_EMIT_SYMBOL, 32), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 32), + (2, HUFFMAN_EMIT_SYMBOL, 37), + (9, HUFFMAN_EMIT_SYMBOL, 37), + (23, HUFFMAN_EMIT_SYMBOL, 37), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 37), + (2, HUFFMAN_EMIT_SYMBOL, 45), + (9, HUFFMAN_EMIT_SYMBOL, 45), + (23, HUFFMAN_EMIT_SYMBOL, 45), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 45), + (2, HUFFMAN_EMIT_SYMBOL, 46), + (9, HUFFMAN_EMIT_SYMBOL, 46), + (23, HUFFMAN_EMIT_SYMBOL, 46), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 46), + + # Node 13 + (3, HUFFMAN_EMIT_SYMBOL, 32), + (6, HUFFMAN_EMIT_SYMBOL, 32), + (10, HUFFMAN_EMIT_SYMBOL, 32), + (15, HUFFMAN_EMIT_SYMBOL, 32), + (24, HUFFMAN_EMIT_SYMBOL, 32), + (31, HUFFMAN_EMIT_SYMBOL, 32), + (41, HUFFMAN_EMIT_SYMBOL, 32), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 32), + (3, HUFFMAN_EMIT_SYMBOL, 37), + (6, HUFFMAN_EMIT_SYMBOL, 37), + (10, HUFFMAN_EMIT_SYMBOL, 37), + (15, HUFFMAN_EMIT_SYMBOL, 37), + (24, HUFFMAN_EMIT_SYMBOL, 37), + (31, HUFFMAN_EMIT_SYMBOL, 37), + (41, HUFFMAN_EMIT_SYMBOL, 37), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 37), + + # Node 14 + (3, HUFFMAN_EMIT_SYMBOL, 45), + (6, HUFFMAN_EMIT_SYMBOL, 45), + (10, HUFFMAN_EMIT_SYMBOL, 45), + (15, HUFFMAN_EMIT_SYMBOL, 45), + (24, HUFFMAN_EMIT_SYMBOL, 45), + (31, HUFFMAN_EMIT_SYMBOL, 45), + (41, HUFFMAN_EMIT_SYMBOL, 45), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 45), + (3, HUFFMAN_EMIT_SYMBOL, 46), + (6, HUFFMAN_EMIT_SYMBOL, 46), + (10, HUFFMAN_EMIT_SYMBOL, 46), + (15, HUFFMAN_EMIT_SYMBOL, 46), + (24, HUFFMAN_EMIT_SYMBOL, 46), + (31, HUFFMAN_EMIT_SYMBOL, 46), + (41, HUFFMAN_EMIT_SYMBOL, 46), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 46), + + # Node 15 + (1, HUFFMAN_EMIT_SYMBOL, 47), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 47), + (1, HUFFMAN_EMIT_SYMBOL, 51), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 51), + (1, HUFFMAN_EMIT_SYMBOL, 52), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 52), + (1, HUFFMAN_EMIT_SYMBOL, 53), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 53), + (1, HUFFMAN_EMIT_SYMBOL, 54), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 54), + (1, HUFFMAN_EMIT_SYMBOL, 55), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 55), + (1, HUFFMAN_EMIT_SYMBOL, 56), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 56), + (1, HUFFMAN_EMIT_SYMBOL, 57), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 57), + + # Node 16 + (2, HUFFMAN_EMIT_SYMBOL, 47), + (9, HUFFMAN_EMIT_SYMBOL, 47), + (23, HUFFMAN_EMIT_SYMBOL, 47), + (40, HUFFMAN_COMPLETE | 
HUFFMAN_EMIT_SYMBOL, 47), + (2, HUFFMAN_EMIT_SYMBOL, 51), + (9, HUFFMAN_EMIT_SYMBOL, 51), + (23, HUFFMAN_EMIT_SYMBOL, 51), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 51), + (2, HUFFMAN_EMIT_SYMBOL, 52), + (9, HUFFMAN_EMIT_SYMBOL, 52), + (23, HUFFMAN_EMIT_SYMBOL, 52), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 52), + (2, HUFFMAN_EMIT_SYMBOL, 53), + (9, HUFFMAN_EMIT_SYMBOL, 53), + (23, HUFFMAN_EMIT_SYMBOL, 53), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 53), + + # Node 17 + (3, HUFFMAN_EMIT_SYMBOL, 47), + (6, HUFFMAN_EMIT_SYMBOL, 47), + (10, HUFFMAN_EMIT_SYMBOL, 47), + (15, HUFFMAN_EMIT_SYMBOL, 47), + (24, HUFFMAN_EMIT_SYMBOL, 47), + (31, HUFFMAN_EMIT_SYMBOL, 47), + (41, HUFFMAN_EMIT_SYMBOL, 47), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 47), + (3, HUFFMAN_EMIT_SYMBOL, 51), + (6, HUFFMAN_EMIT_SYMBOL, 51), + (10, HUFFMAN_EMIT_SYMBOL, 51), + (15, HUFFMAN_EMIT_SYMBOL, 51), + (24, HUFFMAN_EMIT_SYMBOL, 51), + (31, HUFFMAN_EMIT_SYMBOL, 51), + (41, HUFFMAN_EMIT_SYMBOL, 51), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 51), + + # Node 18 + (3, HUFFMAN_EMIT_SYMBOL, 52), + (6, HUFFMAN_EMIT_SYMBOL, 52), + (10, HUFFMAN_EMIT_SYMBOL, 52), + (15, HUFFMAN_EMIT_SYMBOL, 52), + (24, HUFFMAN_EMIT_SYMBOL, 52), + (31, HUFFMAN_EMIT_SYMBOL, 52), + (41, HUFFMAN_EMIT_SYMBOL, 52), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 52), + (3, HUFFMAN_EMIT_SYMBOL, 53), + (6, HUFFMAN_EMIT_SYMBOL, 53), + (10, HUFFMAN_EMIT_SYMBOL, 53), + (15, HUFFMAN_EMIT_SYMBOL, 53), + (24, HUFFMAN_EMIT_SYMBOL, 53), + (31, HUFFMAN_EMIT_SYMBOL, 53), + (41, HUFFMAN_EMIT_SYMBOL, 53), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 53), + + # Node 19 + (2, HUFFMAN_EMIT_SYMBOL, 54), + (9, HUFFMAN_EMIT_SYMBOL, 54), + (23, HUFFMAN_EMIT_SYMBOL, 54), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 54), + (2, HUFFMAN_EMIT_SYMBOL, 55), + (9, HUFFMAN_EMIT_SYMBOL, 55), + (23, HUFFMAN_EMIT_SYMBOL, 55), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 55), + (2, HUFFMAN_EMIT_SYMBOL, 56), + (9, HUFFMAN_EMIT_SYMBOL, 56), + (23, HUFFMAN_EMIT_SYMBOL, 56), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 56), + (2, HUFFMAN_EMIT_SYMBOL, 57), + (9, HUFFMAN_EMIT_SYMBOL, 57), + (23, HUFFMAN_EMIT_SYMBOL, 57), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 57), + + # Node 20 + (3, HUFFMAN_EMIT_SYMBOL, 54), + (6, HUFFMAN_EMIT_SYMBOL, 54), + (10, HUFFMAN_EMIT_SYMBOL, 54), + (15, HUFFMAN_EMIT_SYMBOL, 54), + (24, HUFFMAN_EMIT_SYMBOL, 54), + (31, HUFFMAN_EMIT_SYMBOL, 54), + (41, HUFFMAN_EMIT_SYMBOL, 54), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 54), + (3, HUFFMAN_EMIT_SYMBOL, 55), + (6, HUFFMAN_EMIT_SYMBOL, 55), + (10, HUFFMAN_EMIT_SYMBOL, 55), + (15, HUFFMAN_EMIT_SYMBOL, 55), + (24, HUFFMAN_EMIT_SYMBOL, 55), + (31, HUFFMAN_EMIT_SYMBOL, 55), + (41, HUFFMAN_EMIT_SYMBOL, 55), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 55), + + # Node 21 + (3, HUFFMAN_EMIT_SYMBOL, 56), + (6, HUFFMAN_EMIT_SYMBOL, 56), + (10, HUFFMAN_EMIT_SYMBOL, 56), + (15, HUFFMAN_EMIT_SYMBOL, 56), + (24, HUFFMAN_EMIT_SYMBOL, 56), + (31, HUFFMAN_EMIT_SYMBOL, 56), + (41, HUFFMAN_EMIT_SYMBOL, 56), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 56), + (3, HUFFMAN_EMIT_SYMBOL, 57), + (6, HUFFMAN_EMIT_SYMBOL, 57), + (10, HUFFMAN_EMIT_SYMBOL, 57), + (15, HUFFMAN_EMIT_SYMBOL, 57), + (24, HUFFMAN_EMIT_SYMBOL, 57), + (31, HUFFMAN_EMIT_SYMBOL, 57), + (41, HUFFMAN_EMIT_SYMBOL, 57), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 57), + + # Node 22 + (26, 0, 0), + (27, 0, 0), + (29, 0, 0), + (30, 0, 0), + (33, 0, 0), + (34, 0, 0), + (36, 0, 0), + (37, 0, 0), + (43, 0, 0), + (46, 0, 0), + (50, 0, 0), 
+ (53, 0, 0), + (58, 0, 0), + (61, 0, 0), + (65, 0, 0), + (68, HUFFMAN_COMPLETE, 0), + + # Node 23 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 61), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 65), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 95), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 98), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 100), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 102), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 103), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 104), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 108), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 109), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 110), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 112), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 114), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 117), + (38, 0, 0), + (39, 0, 0), + + # Node 24 + (1, HUFFMAN_EMIT_SYMBOL, 61), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 61), + (1, HUFFMAN_EMIT_SYMBOL, 65), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 65), + (1, HUFFMAN_EMIT_SYMBOL, 95), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 95), + (1, HUFFMAN_EMIT_SYMBOL, 98), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 98), + (1, HUFFMAN_EMIT_SYMBOL, 100), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 100), + (1, HUFFMAN_EMIT_SYMBOL, 102), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 102), + (1, HUFFMAN_EMIT_SYMBOL, 103), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 103), + (1, HUFFMAN_EMIT_SYMBOL, 104), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 104), + + # Node 25 + (2, HUFFMAN_EMIT_SYMBOL, 61), + (9, HUFFMAN_EMIT_SYMBOL, 61), + (23, HUFFMAN_EMIT_SYMBOL, 61), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 61), + (2, HUFFMAN_EMIT_SYMBOL, 65), + (9, HUFFMAN_EMIT_SYMBOL, 65), + (23, HUFFMAN_EMIT_SYMBOL, 65), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 65), + (2, HUFFMAN_EMIT_SYMBOL, 95), + (9, HUFFMAN_EMIT_SYMBOL, 95), + (23, HUFFMAN_EMIT_SYMBOL, 95), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 95), + (2, HUFFMAN_EMIT_SYMBOL, 98), + (9, HUFFMAN_EMIT_SYMBOL, 98), + (23, HUFFMAN_EMIT_SYMBOL, 98), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 98), + + # Node 26 + (3, HUFFMAN_EMIT_SYMBOL, 61), + (6, HUFFMAN_EMIT_SYMBOL, 61), + (10, HUFFMAN_EMIT_SYMBOL, 61), + (15, HUFFMAN_EMIT_SYMBOL, 61), + (24, HUFFMAN_EMIT_SYMBOL, 61), + (31, HUFFMAN_EMIT_SYMBOL, 61), + (41, HUFFMAN_EMIT_SYMBOL, 61), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 61), + (3, HUFFMAN_EMIT_SYMBOL, 65), + (6, HUFFMAN_EMIT_SYMBOL, 65), + (10, HUFFMAN_EMIT_SYMBOL, 65), + (15, HUFFMAN_EMIT_SYMBOL, 65), + (24, HUFFMAN_EMIT_SYMBOL, 65), + (31, HUFFMAN_EMIT_SYMBOL, 65), + (41, HUFFMAN_EMIT_SYMBOL, 65), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 65), + + # Node 27 + (3, HUFFMAN_EMIT_SYMBOL, 95), + (6, HUFFMAN_EMIT_SYMBOL, 95), + (10, HUFFMAN_EMIT_SYMBOL, 95), + (15, HUFFMAN_EMIT_SYMBOL, 95), + (24, HUFFMAN_EMIT_SYMBOL, 95), + (31, HUFFMAN_EMIT_SYMBOL, 95), + (41, HUFFMAN_EMIT_SYMBOL, 95), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 95), + (3, HUFFMAN_EMIT_SYMBOL, 98), + (6, HUFFMAN_EMIT_SYMBOL, 98), + (10, HUFFMAN_EMIT_SYMBOL, 98), + (15, HUFFMAN_EMIT_SYMBOL, 98), + (24, HUFFMAN_EMIT_SYMBOL, 98), + (31, HUFFMAN_EMIT_SYMBOL, 98), + (41, HUFFMAN_EMIT_SYMBOL, 98), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 98), + + # Node 28 + (2, HUFFMAN_EMIT_SYMBOL, 100), + (9, HUFFMAN_EMIT_SYMBOL, 100), + (23, HUFFMAN_EMIT_SYMBOL, 100), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 100), + (2, HUFFMAN_EMIT_SYMBOL, 102), + (9, HUFFMAN_EMIT_SYMBOL, 102), + (23, 
HUFFMAN_EMIT_SYMBOL, 102), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 102), + (2, HUFFMAN_EMIT_SYMBOL, 103), + (9, HUFFMAN_EMIT_SYMBOL, 103), + (23, HUFFMAN_EMIT_SYMBOL, 103), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 103), + (2, HUFFMAN_EMIT_SYMBOL, 104), + (9, HUFFMAN_EMIT_SYMBOL, 104), + (23, HUFFMAN_EMIT_SYMBOL, 104), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 104), + + # Node 29 + (3, HUFFMAN_EMIT_SYMBOL, 100), + (6, HUFFMAN_EMIT_SYMBOL, 100), + (10, HUFFMAN_EMIT_SYMBOL, 100), + (15, HUFFMAN_EMIT_SYMBOL, 100), + (24, HUFFMAN_EMIT_SYMBOL, 100), + (31, HUFFMAN_EMIT_SYMBOL, 100), + (41, HUFFMAN_EMIT_SYMBOL, 100), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 100), + (3, HUFFMAN_EMIT_SYMBOL, 102), + (6, HUFFMAN_EMIT_SYMBOL, 102), + (10, HUFFMAN_EMIT_SYMBOL, 102), + (15, HUFFMAN_EMIT_SYMBOL, 102), + (24, HUFFMAN_EMIT_SYMBOL, 102), + (31, HUFFMAN_EMIT_SYMBOL, 102), + (41, HUFFMAN_EMIT_SYMBOL, 102), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 102), + + # Node 30 + (3, HUFFMAN_EMIT_SYMBOL, 103), + (6, HUFFMAN_EMIT_SYMBOL, 103), + (10, HUFFMAN_EMIT_SYMBOL, 103), + (15, HUFFMAN_EMIT_SYMBOL, 103), + (24, HUFFMAN_EMIT_SYMBOL, 103), + (31, HUFFMAN_EMIT_SYMBOL, 103), + (41, HUFFMAN_EMIT_SYMBOL, 103), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 103), + (3, HUFFMAN_EMIT_SYMBOL, 104), + (6, HUFFMAN_EMIT_SYMBOL, 104), + (10, HUFFMAN_EMIT_SYMBOL, 104), + (15, HUFFMAN_EMIT_SYMBOL, 104), + (24, HUFFMAN_EMIT_SYMBOL, 104), + (31, HUFFMAN_EMIT_SYMBOL, 104), + (41, HUFFMAN_EMIT_SYMBOL, 104), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 104), + + # Node 31 + (1, HUFFMAN_EMIT_SYMBOL, 108), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 108), + (1, HUFFMAN_EMIT_SYMBOL, 109), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 109), + (1, HUFFMAN_EMIT_SYMBOL, 110), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 110), + (1, HUFFMAN_EMIT_SYMBOL, 112), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 112), + (1, HUFFMAN_EMIT_SYMBOL, 114), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 114), + (1, HUFFMAN_EMIT_SYMBOL, 117), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 117), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 58), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 66), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 67), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 68), + + # Node 32 + (2, HUFFMAN_EMIT_SYMBOL, 108), + (9, HUFFMAN_EMIT_SYMBOL, 108), + (23, HUFFMAN_EMIT_SYMBOL, 108), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 108), + (2, HUFFMAN_EMIT_SYMBOL, 109), + (9, HUFFMAN_EMIT_SYMBOL, 109), + (23, HUFFMAN_EMIT_SYMBOL, 109), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 109), + (2, HUFFMAN_EMIT_SYMBOL, 110), + (9, HUFFMAN_EMIT_SYMBOL, 110), + (23, HUFFMAN_EMIT_SYMBOL, 110), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 110), + (2, HUFFMAN_EMIT_SYMBOL, 112), + (9, HUFFMAN_EMIT_SYMBOL, 112), + (23, HUFFMAN_EMIT_SYMBOL, 112), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 112), + + # Node 33 + (3, HUFFMAN_EMIT_SYMBOL, 108), + (6, HUFFMAN_EMIT_SYMBOL, 108), + (10, HUFFMAN_EMIT_SYMBOL, 108), + (15, HUFFMAN_EMIT_SYMBOL, 108), + (24, HUFFMAN_EMIT_SYMBOL, 108), + (31, HUFFMAN_EMIT_SYMBOL, 108), + (41, HUFFMAN_EMIT_SYMBOL, 108), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 108), + (3, HUFFMAN_EMIT_SYMBOL, 109), + (6, HUFFMAN_EMIT_SYMBOL, 109), + (10, HUFFMAN_EMIT_SYMBOL, 109), + (15, HUFFMAN_EMIT_SYMBOL, 109), + (24, HUFFMAN_EMIT_SYMBOL, 109), + (31, HUFFMAN_EMIT_SYMBOL, 109), + (41, HUFFMAN_EMIT_SYMBOL, 109), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 109), + + # Node 34 + (3, 
HUFFMAN_EMIT_SYMBOL, 110), + (6, HUFFMAN_EMIT_SYMBOL, 110), + (10, HUFFMAN_EMIT_SYMBOL, 110), + (15, HUFFMAN_EMIT_SYMBOL, 110), + (24, HUFFMAN_EMIT_SYMBOL, 110), + (31, HUFFMAN_EMIT_SYMBOL, 110), + (41, HUFFMAN_EMIT_SYMBOL, 110), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 110), + (3, HUFFMAN_EMIT_SYMBOL, 112), + (6, HUFFMAN_EMIT_SYMBOL, 112), + (10, HUFFMAN_EMIT_SYMBOL, 112), + (15, HUFFMAN_EMIT_SYMBOL, 112), + (24, HUFFMAN_EMIT_SYMBOL, 112), + (31, HUFFMAN_EMIT_SYMBOL, 112), + (41, HUFFMAN_EMIT_SYMBOL, 112), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 112), + + # Node 35 + (2, HUFFMAN_EMIT_SYMBOL, 114), + (9, HUFFMAN_EMIT_SYMBOL, 114), + (23, HUFFMAN_EMIT_SYMBOL, 114), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 114), + (2, HUFFMAN_EMIT_SYMBOL, 117), + (9, HUFFMAN_EMIT_SYMBOL, 117), + (23, HUFFMAN_EMIT_SYMBOL, 117), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 117), + (1, HUFFMAN_EMIT_SYMBOL, 58), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 58), + (1, HUFFMAN_EMIT_SYMBOL, 66), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 66), + (1, HUFFMAN_EMIT_SYMBOL, 67), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 67), + (1, HUFFMAN_EMIT_SYMBOL, 68), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 68), + + # Node 36 + (3, HUFFMAN_EMIT_SYMBOL, 114), + (6, HUFFMAN_EMIT_SYMBOL, 114), + (10, HUFFMAN_EMIT_SYMBOL, 114), + (15, HUFFMAN_EMIT_SYMBOL, 114), + (24, HUFFMAN_EMIT_SYMBOL, 114), + (31, HUFFMAN_EMIT_SYMBOL, 114), + (41, HUFFMAN_EMIT_SYMBOL, 114), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 114), + (3, HUFFMAN_EMIT_SYMBOL, 117), + (6, HUFFMAN_EMIT_SYMBOL, 117), + (10, HUFFMAN_EMIT_SYMBOL, 117), + (15, HUFFMAN_EMIT_SYMBOL, 117), + (24, HUFFMAN_EMIT_SYMBOL, 117), + (31, HUFFMAN_EMIT_SYMBOL, 117), + (41, HUFFMAN_EMIT_SYMBOL, 117), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 117), + + # Node 37 + (2, HUFFMAN_EMIT_SYMBOL, 58), + (9, HUFFMAN_EMIT_SYMBOL, 58), + (23, HUFFMAN_EMIT_SYMBOL, 58), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 58), + (2, HUFFMAN_EMIT_SYMBOL, 66), + (9, HUFFMAN_EMIT_SYMBOL, 66), + (23, HUFFMAN_EMIT_SYMBOL, 66), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 66), + (2, HUFFMAN_EMIT_SYMBOL, 67), + (9, HUFFMAN_EMIT_SYMBOL, 67), + (23, HUFFMAN_EMIT_SYMBOL, 67), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 67), + (2, HUFFMAN_EMIT_SYMBOL, 68), + (9, HUFFMAN_EMIT_SYMBOL, 68), + (23, HUFFMAN_EMIT_SYMBOL, 68), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 68), + + # Node 38 + (3, HUFFMAN_EMIT_SYMBOL, 58), + (6, HUFFMAN_EMIT_SYMBOL, 58), + (10, HUFFMAN_EMIT_SYMBOL, 58), + (15, HUFFMAN_EMIT_SYMBOL, 58), + (24, HUFFMAN_EMIT_SYMBOL, 58), + (31, HUFFMAN_EMIT_SYMBOL, 58), + (41, HUFFMAN_EMIT_SYMBOL, 58), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 58), + (3, HUFFMAN_EMIT_SYMBOL, 66), + (6, HUFFMAN_EMIT_SYMBOL, 66), + (10, HUFFMAN_EMIT_SYMBOL, 66), + (15, HUFFMAN_EMIT_SYMBOL, 66), + (24, HUFFMAN_EMIT_SYMBOL, 66), + (31, HUFFMAN_EMIT_SYMBOL, 66), + (41, HUFFMAN_EMIT_SYMBOL, 66), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 66), + + # Node 39 + (3, HUFFMAN_EMIT_SYMBOL, 67), + (6, HUFFMAN_EMIT_SYMBOL, 67), + (10, HUFFMAN_EMIT_SYMBOL, 67), + (15, HUFFMAN_EMIT_SYMBOL, 67), + (24, HUFFMAN_EMIT_SYMBOL, 67), + (31, HUFFMAN_EMIT_SYMBOL, 67), + (41, HUFFMAN_EMIT_SYMBOL, 67), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 67), + (3, HUFFMAN_EMIT_SYMBOL, 68), + (6, HUFFMAN_EMIT_SYMBOL, 68), + (10, HUFFMAN_EMIT_SYMBOL, 68), + (15, HUFFMAN_EMIT_SYMBOL, 68), + (24, HUFFMAN_EMIT_SYMBOL, 68), + (31, HUFFMAN_EMIT_SYMBOL, 68), + (41, HUFFMAN_EMIT_SYMBOL, 68), + (56, HUFFMAN_COMPLETE 
| HUFFMAN_EMIT_SYMBOL, 68), + + # Node 40 + (44, 0, 0), + (45, 0, 0), + (47, 0, 0), + (48, 0, 0), + (51, 0, 0), + (52, 0, 0), + (54, 0, 0), + (55, 0, 0), + (59, 0, 0), + (60, 0, 0), + (62, 0, 0), + (63, 0, 0), + (66, 0, 0), + (67, 0, 0), + (69, 0, 0), + (72, HUFFMAN_COMPLETE, 0), + + # Node 41 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 69), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 70), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 71), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 72), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 73), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 74), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 75), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 76), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 77), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 78), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 79), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 80), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 81), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 82), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 83), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 84), + + # Node 42 + (1, HUFFMAN_EMIT_SYMBOL, 69), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 69), + (1, HUFFMAN_EMIT_SYMBOL, 70), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 70), + (1, HUFFMAN_EMIT_SYMBOL, 71), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 71), + (1, HUFFMAN_EMIT_SYMBOL, 72), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 72), + (1, HUFFMAN_EMIT_SYMBOL, 73), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 73), + (1, HUFFMAN_EMIT_SYMBOL, 74), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 74), + (1, HUFFMAN_EMIT_SYMBOL, 75), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 75), + (1, HUFFMAN_EMIT_SYMBOL, 76), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 76), + + # Node 43 + (2, HUFFMAN_EMIT_SYMBOL, 69), + (9, HUFFMAN_EMIT_SYMBOL, 69), + (23, HUFFMAN_EMIT_SYMBOL, 69), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 69), + (2, HUFFMAN_EMIT_SYMBOL, 70), + (9, HUFFMAN_EMIT_SYMBOL, 70), + (23, HUFFMAN_EMIT_SYMBOL, 70), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 70), + (2, HUFFMAN_EMIT_SYMBOL, 71), + (9, HUFFMAN_EMIT_SYMBOL, 71), + (23, HUFFMAN_EMIT_SYMBOL, 71), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 71), + (2, HUFFMAN_EMIT_SYMBOL, 72), + (9, HUFFMAN_EMIT_SYMBOL, 72), + (23, HUFFMAN_EMIT_SYMBOL, 72), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 72), + + # Node 44 + (3, HUFFMAN_EMIT_SYMBOL, 69), + (6, HUFFMAN_EMIT_SYMBOL, 69), + (10, HUFFMAN_EMIT_SYMBOL, 69), + (15, HUFFMAN_EMIT_SYMBOL, 69), + (24, HUFFMAN_EMIT_SYMBOL, 69), + (31, HUFFMAN_EMIT_SYMBOL, 69), + (41, HUFFMAN_EMIT_SYMBOL, 69), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 69), + (3, HUFFMAN_EMIT_SYMBOL, 70), + (6, HUFFMAN_EMIT_SYMBOL, 70), + (10, HUFFMAN_EMIT_SYMBOL, 70), + (15, HUFFMAN_EMIT_SYMBOL, 70), + (24, HUFFMAN_EMIT_SYMBOL, 70), + (31, HUFFMAN_EMIT_SYMBOL, 70), + (41, HUFFMAN_EMIT_SYMBOL, 70), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 70), + + # Node 45 + (3, HUFFMAN_EMIT_SYMBOL, 71), + (6, HUFFMAN_EMIT_SYMBOL, 71), + (10, HUFFMAN_EMIT_SYMBOL, 71), + (15, HUFFMAN_EMIT_SYMBOL, 71), + (24, HUFFMAN_EMIT_SYMBOL, 71), + (31, HUFFMAN_EMIT_SYMBOL, 71), + (41, HUFFMAN_EMIT_SYMBOL, 71), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 71), + (3, HUFFMAN_EMIT_SYMBOL, 72), + (6, HUFFMAN_EMIT_SYMBOL, 72), + (10, HUFFMAN_EMIT_SYMBOL, 72), + (15, HUFFMAN_EMIT_SYMBOL, 72), + (24, HUFFMAN_EMIT_SYMBOL, 72), + (31, HUFFMAN_EMIT_SYMBOL, 72), + (41, HUFFMAN_EMIT_SYMBOL, 72), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 
72), + + # Node 46 + (2, HUFFMAN_EMIT_SYMBOL, 73), + (9, HUFFMAN_EMIT_SYMBOL, 73), + (23, HUFFMAN_EMIT_SYMBOL, 73), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 73), + (2, HUFFMAN_EMIT_SYMBOL, 74), + (9, HUFFMAN_EMIT_SYMBOL, 74), + (23, HUFFMAN_EMIT_SYMBOL, 74), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 74), + (2, HUFFMAN_EMIT_SYMBOL, 75), + (9, HUFFMAN_EMIT_SYMBOL, 75), + (23, HUFFMAN_EMIT_SYMBOL, 75), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 75), + (2, HUFFMAN_EMIT_SYMBOL, 76), + (9, HUFFMAN_EMIT_SYMBOL, 76), + (23, HUFFMAN_EMIT_SYMBOL, 76), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 76), + + # Node 47 + (3, HUFFMAN_EMIT_SYMBOL, 73), + (6, HUFFMAN_EMIT_SYMBOL, 73), + (10, HUFFMAN_EMIT_SYMBOL, 73), + (15, HUFFMAN_EMIT_SYMBOL, 73), + (24, HUFFMAN_EMIT_SYMBOL, 73), + (31, HUFFMAN_EMIT_SYMBOL, 73), + (41, HUFFMAN_EMIT_SYMBOL, 73), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 73), + (3, HUFFMAN_EMIT_SYMBOL, 74), + (6, HUFFMAN_EMIT_SYMBOL, 74), + (10, HUFFMAN_EMIT_SYMBOL, 74), + (15, HUFFMAN_EMIT_SYMBOL, 74), + (24, HUFFMAN_EMIT_SYMBOL, 74), + (31, HUFFMAN_EMIT_SYMBOL, 74), + (41, HUFFMAN_EMIT_SYMBOL, 74), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 74), + + # Node 48 + (3, HUFFMAN_EMIT_SYMBOL, 75), + (6, HUFFMAN_EMIT_SYMBOL, 75), + (10, HUFFMAN_EMIT_SYMBOL, 75), + (15, HUFFMAN_EMIT_SYMBOL, 75), + (24, HUFFMAN_EMIT_SYMBOL, 75), + (31, HUFFMAN_EMIT_SYMBOL, 75), + (41, HUFFMAN_EMIT_SYMBOL, 75), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 75), + (3, HUFFMAN_EMIT_SYMBOL, 76), + (6, HUFFMAN_EMIT_SYMBOL, 76), + (10, HUFFMAN_EMIT_SYMBOL, 76), + (15, HUFFMAN_EMIT_SYMBOL, 76), + (24, HUFFMAN_EMIT_SYMBOL, 76), + (31, HUFFMAN_EMIT_SYMBOL, 76), + (41, HUFFMAN_EMIT_SYMBOL, 76), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 76), + + # Node 49 + (1, HUFFMAN_EMIT_SYMBOL, 77), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 77), + (1, HUFFMAN_EMIT_SYMBOL, 78), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 78), + (1, HUFFMAN_EMIT_SYMBOL, 79), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 79), + (1, HUFFMAN_EMIT_SYMBOL, 80), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 80), + (1, HUFFMAN_EMIT_SYMBOL, 81), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 81), + (1, HUFFMAN_EMIT_SYMBOL, 82), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 82), + (1, HUFFMAN_EMIT_SYMBOL, 83), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 83), + (1, HUFFMAN_EMIT_SYMBOL, 84), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 84), + + # Node 50 + (2, HUFFMAN_EMIT_SYMBOL, 77), + (9, HUFFMAN_EMIT_SYMBOL, 77), + (23, HUFFMAN_EMIT_SYMBOL, 77), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 77), + (2, HUFFMAN_EMIT_SYMBOL, 78), + (9, HUFFMAN_EMIT_SYMBOL, 78), + (23, HUFFMAN_EMIT_SYMBOL, 78), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 78), + (2, HUFFMAN_EMIT_SYMBOL, 79), + (9, HUFFMAN_EMIT_SYMBOL, 79), + (23, HUFFMAN_EMIT_SYMBOL, 79), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 79), + (2, HUFFMAN_EMIT_SYMBOL, 80), + (9, HUFFMAN_EMIT_SYMBOL, 80), + (23, HUFFMAN_EMIT_SYMBOL, 80), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 80), + + # Node 51 + (3, HUFFMAN_EMIT_SYMBOL, 77), + (6, HUFFMAN_EMIT_SYMBOL, 77), + (10, HUFFMAN_EMIT_SYMBOL, 77), + (15, HUFFMAN_EMIT_SYMBOL, 77), + (24, HUFFMAN_EMIT_SYMBOL, 77), + (31, HUFFMAN_EMIT_SYMBOL, 77), + (41, HUFFMAN_EMIT_SYMBOL, 77), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 77), + (3, HUFFMAN_EMIT_SYMBOL, 78), + (6, HUFFMAN_EMIT_SYMBOL, 78), + (10, HUFFMAN_EMIT_SYMBOL, 78), + (15, HUFFMAN_EMIT_SYMBOL, 78), + (24, HUFFMAN_EMIT_SYMBOL, 78), + (31, HUFFMAN_EMIT_SYMBOL, 78), 
+ (41, HUFFMAN_EMIT_SYMBOL, 78), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 78), + + # Node 52 + (3, HUFFMAN_EMIT_SYMBOL, 79), + (6, HUFFMAN_EMIT_SYMBOL, 79), + (10, HUFFMAN_EMIT_SYMBOL, 79), + (15, HUFFMAN_EMIT_SYMBOL, 79), + (24, HUFFMAN_EMIT_SYMBOL, 79), + (31, HUFFMAN_EMIT_SYMBOL, 79), + (41, HUFFMAN_EMIT_SYMBOL, 79), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 79), + (3, HUFFMAN_EMIT_SYMBOL, 80), + (6, HUFFMAN_EMIT_SYMBOL, 80), + (10, HUFFMAN_EMIT_SYMBOL, 80), + (15, HUFFMAN_EMIT_SYMBOL, 80), + (24, HUFFMAN_EMIT_SYMBOL, 80), + (31, HUFFMAN_EMIT_SYMBOL, 80), + (41, HUFFMAN_EMIT_SYMBOL, 80), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 80), + + # Node 53 + (2, HUFFMAN_EMIT_SYMBOL, 81), + (9, HUFFMAN_EMIT_SYMBOL, 81), + (23, HUFFMAN_EMIT_SYMBOL, 81), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 81), + (2, HUFFMAN_EMIT_SYMBOL, 82), + (9, HUFFMAN_EMIT_SYMBOL, 82), + (23, HUFFMAN_EMIT_SYMBOL, 82), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 82), + (2, HUFFMAN_EMIT_SYMBOL, 83), + (9, HUFFMAN_EMIT_SYMBOL, 83), + (23, HUFFMAN_EMIT_SYMBOL, 83), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 83), + (2, HUFFMAN_EMIT_SYMBOL, 84), + (9, HUFFMAN_EMIT_SYMBOL, 84), + (23, HUFFMAN_EMIT_SYMBOL, 84), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 84), + + # Node 54 + (3, HUFFMAN_EMIT_SYMBOL, 81), + (6, HUFFMAN_EMIT_SYMBOL, 81), + (10, HUFFMAN_EMIT_SYMBOL, 81), + (15, HUFFMAN_EMIT_SYMBOL, 81), + (24, HUFFMAN_EMIT_SYMBOL, 81), + (31, HUFFMAN_EMIT_SYMBOL, 81), + (41, HUFFMAN_EMIT_SYMBOL, 81), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 81), + (3, HUFFMAN_EMIT_SYMBOL, 82), + (6, HUFFMAN_EMIT_SYMBOL, 82), + (10, HUFFMAN_EMIT_SYMBOL, 82), + (15, HUFFMAN_EMIT_SYMBOL, 82), + (24, HUFFMAN_EMIT_SYMBOL, 82), + (31, HUFFMAN_EMIT_SYMBOL, 82), + (41, HUFFMAN_EMIT_SYMBOL, 82), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 82), + + # Node 55 + (3, HUFFMAN_EMIT_SYMBOL, 83), + (6, HUFFMAN_EMIT_SYMBOL, 83), + (10, HUFFMAN_EMIT_SYMBOL, 83), + (15, HUFFMAN_EMIT_SYMBOL, 83), + (24, HUFFMAN_EMIT_SYMBOL, 83), + (31, HUFFMAN_EMIT_SYMBOL, 83), + (41, HUFFMAN_EMIT_SYMBOL, 83), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 83), + (3, HUFFMAN_EMIT_SYMBOL, 84), + (6, HUFFMAN_EMIT_SYMBOL, 84), + (10, HUFFMAN_EMIT_SYMBOL, 84), + (15, HUFFMAN_EMIT_SYMBOL, 84), + (24, HUFFMAN_EMIT_SYMBOL, 84), + (31, HUFFMAN_EMIT_SYMBOL, 84), + (41, HUFFMAN_EMIT_SYMBOL, 84), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 84), + + # Node 56 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 85), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 86), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 87), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 89), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 106), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 107), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 113), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 118), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 119), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 120), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 121), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 122), + (70, 0, 0), + (71, 0, 0), + (73, 0, 0), + (74, HUFFMAN_COMPLETE, 0), + + # Node 57 + (1, HUFFMAN_EMIT_SYMBOL, 85), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 85), + (1, HUFFMAN_EMIT_SYMBOL, 86), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 86), + (1, HUFFMAN_EMIT_SYMBOL, 87), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 87), + (1, HUFFMAN_EMIT_SYMBOL, 89), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 89), + (1, HUFFMAN_EMIT_SYMBOL, 106), + (22, HUFFMAN_COMPLETE | 
HUFFMAN_EMIT_SYMBOL, 106), + (1, HUFFMAN_EMIT_SYMBOL, 107), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 107), + (1, HUFFMAN_EMIT_SYMBOL, 113), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 113), + (1, HUFFMAN_EMIT_SYMBOL, 118), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 118), + + # Node 58 + (2, HUFFMAN_EMIT_SYMBOL, 85), + (9, HUFFMAN_EMIT_SYMBOL, 85), + (23, HUFFMAN_EMIT_SYMBOL, 85), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 85), + (2, HUFFMAN_EMIT_SYMBOL, 86), + (9, HUFFMAN_EMIT_SYMBOL, 86), + (23, HUFFMAN_EMIT_SYMBOL, 86), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 86), + (2, HUFFMAN_EMIT_SYMBOL, 87), + (9, HUFFMAN_EMIT_SYMBOL, 87), + (23, HUFFMAN_EMIT_SYMBOL, 87), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 87), + (2, HUFFMAN_EMIT_SYMBOL, 89), + (9, HUFFMAN_EMIT_SYMBOL, 89), + (23, HUFFMAN_EMIT_SYMBOL, 89), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 89), + + # Node 59 + (3, HUFFMAN_EMIT_SYMBOL, 85), + (6, HUFFMAN_EMIT_SYMBOL, 85), + (10, HUFFMAN_EMIT_SYMBOL, 85), + (15, HUFFMAN_EMIT_SYMBOL, 85), + (24, HUFFMAN_EMIT_SYMBOL, 85), + (31, HUFFMAN_EMIT_SYMBOL, 85), + (41, HUFFMAN_EMIT_SYMBOL, 85), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 85), + (3, HUFFMAN_EMIT_SYMBOL, 86), + (6, HUFFMAN_EMIT_SYMBOL, 86), + (10, HUFFMAN_EMIT_SYMBOL, 86), + (15, HUFFMAN_EMIT_SYMBOL, 86), + (24, HUFFMAN_EMIT_SYMBOL, 86), + (31, HUFFMAN_EMIT_SYMBOL, 86), + (41, HUFFMAN_EMIT_SYMBOL, 86), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 86), + + # Node 60 + (3, HUFFMAN_EMIT_SYMBOL, 87), + (6, HUFFMAN_EMIT_SYMBOL, 87), + (10, HUFFMAN_EMIT_SYMBOL, 87), + (15, HUFFMAN_EMIT_SYMBOL, 87), + (24, HUFFMAN_EMIT_SYMBOL, 87), + (31, HUFFMAN_EMIT_SYMBOL, 87), + (41, HUFFMAN_EMIT_SYMBOL, 87), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 87), + (3, HUFFMAN_EMIT_SYMBOL, 89), + (6, HUFFMAN_EMIT_SYMBOL, 89), + (10, HUFFMAN_EMIT_SYMBOL, 89), + (15, HUFFMAN_EMIT_SYMBOL, 89), + (24, HUFFMAN_EMIT_SYMBOL, 89), + (31, HUFFMAN_EMIT_SYMBOL, 89), + (41, HUFFMAN_EMIT_SYMBOL, 89), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 89), + + # Node 61 + (2, HUFFMAN_EMIT_SYMBOL, 106), + (9, HUFFMAN_EMIT_SYMBOL, 106), + (23, HUFFMAN_EMIT_SYMBOL, 106), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 106), + (2, HUFFMAN_EMIT_SYMBOL, 107), + (9, HUFFMAN_EMIT_SYMBOL, 107), + (23, HUFFMAN_EMIT_SYMBOL, 107), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 107), + (2, HUFFMAN_EMIT_SYMBOL, 113), + (9, HUFFMAN_EMIT_SYMBOL, 113), + (23, HUFFMAN_EMIT_SYMBOL, 113), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 113), + (2, HUFFMAN_EMIT_SYMBOL, 118), + (9, HUFFMAN_EMIT_SYMBOL, 118), + (23, HUFFMAN_EMIT_SYMBOL, 118), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 118), + + # Node 62 + (3, HUFFMAN_EMIT_SYMBOL, 106), + (6, HUFFMAN_EMIT_SYMBOL, 106), + (10, HUFFMAN_EMIT_SYMBOL, 106), + (15, HUFFMAN_EMIT_SYMBOL, 106), + (24, HUFFMAN_EMIT_SYMBOL, 106), + (31, HUFFMAN_EMIT_SYMBOL, 106), + (41, HUFFMAN_EMIT_SYMBOL, 106), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 106), + (3, HUFFMAN_EMIT_SYMBOL, 107), + (6, HUFFMAN_EMIT_SYMBOL, 107), + (10, HUFFMAN_EMIT_SYMBOL, 107), + (15, HUFFMAN_EMIT_SYMBOL, 107), + (24, HUFFMAN_EMIT_SYMBOL, 107), + (31, HUFFMAN_EMIT_SYMBOL, 107), + (41, HUFFMAN_EMIT_SYMBOL, 107), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 107), + + # Node 63 + (3, HUFFMAN_EMIT_SYMBOL, 113), + (6, HUFFMAN_EMIT_SYMBOL, 113), + (10, HUFFMAN_EMIT_SYMBOL, 113), + (15, HUFFMAN_EMIT_SYMBOL, 113), + (24, HUFFMAN_EMIT_SYMBOL, 113), + (31, HUFFMAN_EMIT_SYMBOL, 113), + (41, HUFFMAN_EMIT_SYMBOL, 113), + (56, HUFFMAN_COMPLETE | 
HUFFMAN_EMIT_SYMBOL, 113), + (3, HUFFMAN_EMIT_SYMBOL, 118), + (6, HUFFMAN_EMIT_SYMBOL, 118), + (10, HUFFMAN_EMIT_SYMBOL, 118), + (15, HUFFMAN_EMIT_SYMBOL, 118), + (24, HUFFMAN_EMIT_SYMBOL, 118), + (31, HUFFMAN_EMIT_SYMBOL, 118), + (41, HUFFMAN_EMIT_SYMBOL, 118), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 118), + + # Node 64 + (1, HUFFMAN_EMIT_SYMBOL, 119), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 119), + (1, HUFFMAN_EMIT_SYMBOL, 120), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 120), + (1, HUFFMAN_EMIT_SYMBOL, 121), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 121), + (1, HUFFMAN_EMIT_SYMBOL, 122), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 122), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 38), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 42), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 44), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 59), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 88), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 90), + (75, 0, 0), + (78, 0, 0), + + # Node 65 + (2, HUFFMAN_EMIT_SYMBOL, 119), + (9, HUFFMAN_EMIT_SYMBOL, 119), + (23, HUFFMAN_EMIT_SYMBOL, 119), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 119), + (2, HUFFMAN_EMIT_SYMBOL, 120), + (9, HUFFMAN_EMIT_SYMBOL, 120), + (23, HUFFMAN_EMIT_SYMBOL, 120), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 120), + (2, HUFFMAN_EMIT_SYMBOL, 121), + (9, HUFFMAN_EMIT_SYMBOL, 121), + (23, HUFFMAN_EMIT_SYMBOL, 121), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 121), + (2, HUFFMAN_EMIT_SYMBOL, 122), + (9, HUFFMAN_EMIT_SYMBOL, 122), + (23, HUFFMAN_EMIT_SYMBOL, 122), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 122), + + # Node 66 + (3, HUFFMAN_EMIT_SYMBOL, 119), + (6, HUFFMAN_EMIT_SYMBOL, 119), + (10, HUFFMAN_EMIT_SYMBOL, 119), + (15, HUFFMAN_EMIT_SYMBOL, 119), + (24, HUFFMAN_EMIT_SYMBOL, 119), + (31, HUFFMAN_EMIT_SYMBOL, 119), + (41, HUFFMAN_EMIT_SYMBOL, 119), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 119), + (3, HUFFMAN_EMIT_SYMBOL, 120), + (6, HUFFMAN_EMIT_SYMBOL, 120), + (10, HUFFMAN_EMIT_SYMBOL, 120), + (15, HUFFMAN_EMIT_SYMBOL, 120), + (24, HUFFMAN_EMIT_SYMBOL, 120), + (31, HUFFMAN_EMIT_SYMBOL, 120), + (41, HUFFMAN_EMIT_SYMBOL, 120), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 120), + + # Node 67 + (3, HUFFMAN_EMIT_SYMBOL, 121), + (6, HUFFMAN_EMIT_SYMBOL, 121), + (10, HUFFMAN_EMIT_SYMBOL, 121), + (15, HUFFMAN_EMIT_SYMBOL, 121), + (24, HUFFMAN_EMIT_SYMBOL, 121), + (31, HUFFMAN_EMIT_SYMBOL, 121), + (41, HUFFMAN_EMIT_SYMBOL, 121), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 121), + (3, HUFFMAN_EMIT_SYMBOL, 122), + (6, HUFFMAN_EMIT_SYMBOL, 122), + (10, HUFFMAN_EMIT_SYMBOL, 122), + (15, HUFFMAN_EMIT_SYMBOL, 122), + (24, HUFFMAN_EMIT_SYMBOL, 122), + (31, HUFFMAN_EMIT_SYMBOL, 122), + (41, HUFFMAN_EMIT_SYMBOL, 122), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 122), + + # Node 68 + (1, HUFFMAN_EMIT_SYMBOL, 38), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 38), + (1, HUFFMAN_EMIT_SYMBOL, 42), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 42), + (1, HUFFMAN_EMIT_SYMBOL, 44), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 44), + (1, HUFFMAN_EMIT_SYMBOL, 59), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 59), + (1, HUFFMAN_EMIT_SYMBOL, 88), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 88), + (1, HUFFMAN_EMIT_SYMBOL, 90), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 90), + (76, 0, 0), + (77, 0, 0), + (79, 0, 0), + (81, 0, 0), + + # Node 69 + (2, HUFFMAN_EMIT_SYMBOL, 38), + (9, HUFFMAN_EMIT_SYMBOL, 38), + (23, HUFFMAN_EMIT_SYMBOL, 38), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 38), 
+ (2, HUFFMAN_EMIT_SYMBOL, 42), + (9, HUFFMAN_EMIT_SYMBOL, 42), + (23, HUFFMAN_EMIT_SYMBOL, 42), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 42), + (2, HUFFMAN_EMIT_SYMBOL, 44), + (9, HUFFMAN_EMIT_SYMBOL, 44), + (23, HUFFMAN_EMIT_SYMBOL, 44), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 44), + (2, HUFFMAN_EMIT_SYMBOL, 59), + (9, HUFFMAN_EMIT_SYMBOL, 59), + (23, HUFFMAN_EMIT_SYMBOL, 59), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 59), + + # Node 70 + (3, HUFFMAN_EMIT_SYMBOL, 38), + (6, HUFFMAN_EMIT_SYMBOL, 38), + (10, HUFFMAN_EMIT_SYMBOL, 38), + (15, HUFFMAN_EMIT_SYMBOL, 38), + (24, HUFFMAN_EMIT_SYMBOL, 38), + (31, HUFFMAN_EMIT_SYMBOL, 38), + (41, HUFFMAN_EMIT_SYMBOL, 38), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 38), + (3, HUFFMAN_EMIT_SYMBOL, 42), + (6, HUFFMAN_EMIT_SYMBOL, 42), + (10, HUFFMAN_EMIT_SYMBOL, 42), + (15, HUFFMAN_EMIT_SYMBOL, 42), + (24, HUFFMAN_EMIT_SYMBOL, 42), + (31, HUFFMAN_EMIT_SYMBOL, 42), + (41, HUFFMAN_EMIT_SYMBOL, 42), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 42), + + # Node 71 + (3, HUFFMAN_EMIT_SYMBOL, 44), + (6, HUFFMAN_EMIT_SYMBOL, 44), + (10, HUFFMAN_EMIT_SYMBOL, 44), + (15, HUFFMAN_EMIT_SYMBOL, 44), + (24, HUFFMAN_EMIT_SYMBOL, 44), + (31, HUFFMAN_EMIT_SYMBOL, 44), + (41, HUFFMAN_EMIT_SYMBOL, 44), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 44), + (3, HUFFMAN_EMIT_SYMBOL, 59), + (6, HUFFMAN_EMIT_SYMBOL, 59), + (10, HUFFMAN_EMIT_SYMBOL, 59), + (15, HUFFMAN_EMIT_SYMBOL, 59), + (24, HUFFMAN_EMIT_SYMBOL, 59), + (31, HUFFMAN_EMIT_SYMBOL, 59), + (41, HUFFMAN_EMIT_SYMBOL, 59), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 59), + + # Node 72 + (2, HUFFMAN_EMIT_SYMBOL, 88), + (9, HUFFMAN_EMIT_SYMBOL, 88), + (23, HUFFMAN_EMIT_SYMBOL, 88), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 88), + (2, HUFFMAN_EMIT_SYMBOL, 90), + (9, HUFFMAN_EMIT_SYMBOL, 90), + (23, HUFFMAN_EMIT_SYMBOL, 90), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 90), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 33), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 34), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 40), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 41), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 63), + (80, 0, 0), + (82, 0, 0), + (84, 0, 0), + + # Node 73 + (3, HUFFMAN_EMIT_SYMBOL, 88), + (6, HUFFMAN_EMIT_SYMBOL, 88), + (10, HUFFMAN_EMIT_SYMBOL, 88), + (15, HUFFMAN_EMIT_SYMBOL, 88), + (24, HUFFMAN_EMIT_SYMBOL, 88), + (31, HUFFMAN_EMIT_SYMBOL, 88), + (41, HUFFMAN_EMIT_SYMBOL, 88), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 88), + (3, HUFFMAN_EMIT_SYMBOL, 90), + (6, HUFFMAN_EMIT_SYMBOL, 90), + (10, HUFFMAN_EMIT_SYMBOL, 90), + (15, HUFFMAN_EMIT_SYMBOL, 90), + (24, HUFFMAN_EMIT_SYMBOL, 90), + (31, HUFFMAN_EMIT_SYMBOL, 90), + (41, HUFFMAN_EMIT_SYMBOL, 90), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 90), + + # Node 74 + (1, HUFFMAN_EMIT_SYMBOL, 33), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 33), + (1, HUFFMAN_EMIT_SYMBOL, 34), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 34), + (1, HUFFMAN_EMIT_SYMBOL, 40), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 40), + (1, HUFFMAN_EMIT_SYMBOL, 41), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 41), + (1, HUFFMAN_EMIT_SYMBOL, 63), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 63), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 39), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 43), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 124), + (83, 0, 0), + (85, 0, 0), + (88, 0, 0), + + # Node 75 + (2, HUFFMAN_EMIT_SYMBOL, 33), + (9, HUFFMAN_EMIT_SYMBOL, 33), + (23, HUFFMAN_EMIT_SYMBOL, 33), + (40, HUFFMAN_COMPLETE | 
HUFFMAN_EMIT_SYMBOL, 33), + (2, HUFFMAN_EMIT_SYMBOL, 34), + (9, HUFFMAN_EMIT_SYMBOL, 34), + (23, HUFFMAN_EMIT_SYMBOL, 34), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 34), + (2, HUFFMAN_EMIT_SYMBOL, 40), + (9, HUFFMAN_EMIT_SYMBOL, 40), + (23, HUFFMAN_EMIT_SYMBOL, 40), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 40), + (2, HUFFMAN_EMIT_SYMBOL, 41), + (9, HUFFMAN_EMIT_SYMBOL, 41), + (23, HUFFMAN_EMIT_SYMBOL, 41), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 41), + + # Node 76 + (3, HUFFMAN_EMIT_SYMBOL, 33), + (6, HUFFMAN_EMIT_SYMBOL, 33), + (10, HUFFMAN_EMIT_SYMBOL, 33), + (15, HUFFMAN_EMIT_SYMBOL, 33), + (24, HUFFMAN_EMIT_SYMBOL, 33), + (31, HUFFMAN_EMIT_SYMBOL, 33), + (41, HUFFMAN_EMIT_SYMBOL, 33), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 33), + (3, HUFFMAN_EMIT_SYMBOL, 34), + (6, HUFFMAN_EMIT_SYMBOL, 34), + (10, HUFFMAN_EMIT_SYMBOL, 34), + (15, HUFFMAN_EMIT_SYMBOL, 34), + (24, HUFFMAN_EMIT_SYMBOL, 34), + (31, HUFFMAN_EMIT_SYMBOL, 34), + (41, HUFFMAN_EMIT_SYMBOL, 34), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 34), + + # Node 77 + (3, HUFFMAN_EMIT_SYMBOL, 40), + (6, HUFFMAN_EMIT_SYMBOL, 40), + (10, HUFFMAN_EMIT_SYMBOL, 40), + (15, HUFFMAN_EMIT_SYMBOL, 40), + (24, HUFFMAN_EMIT_SYMBOL, 40), + (31, HUFFMAN_EMIT_SYMBOL, 40), + (41, HUFFMAN_EMIT_SYMBOL, 40), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 40), + (3, HUFFMAN_EMIT_SYMBOL, 41), + (6, HUFFMAN_EMIT_SYMBOL, 41), + (10, HUFFMAN_EMIT_SYMBOL, 41), + (15, HUFFMAN_EMIT_SYMBOL, 41), + (24, HUFFMAN_EMIT_SYMBOL, 41), + (31, HUFFMAN_EMIT_SYMBOL, 41), + (41, HUFFMAN_EMIT_SYMBOL, 41), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 41), + + # Node 78 + (2, HUFFMAN_EMIT_SYMBOL, 63), + (9, HUFFMAN_EMIT_SYMBOL, 63), + (23, HUFFMAN_EMIT_SYMBOL, 63), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 63), + (1, HUFFMAN_EMIT_SYMBOL, 39), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 39), + (1, HUFFMAN_EMIT_SYMBOL, 43), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 43), + (1, HUFFMAN_EMIT_SYMBOL, 124), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 124), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 35), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 62), + (86, 0, 0), + (87, 0, 0), + (89, 0, 0), + (90, 0, 0), + + # Node 79 + (3, HUFFMAN_EMIT_SYMBOL, 63), + (6, HUFFMAN_EMIT_SYMBOL, 63), + (10, HUFFMAN_EMIT_SYMBOL, 63), + (15, HUFFMAN_EMIT_SYMBOL, 63), + (24, HUFFMAN_EMIT_SYMBOL, 63), + (31, HUFFMAN_EMIT_SYMBOL, 63), + (41, HUFFMAN_EMIT_SYMBOL, 63), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 63), + (2, HUFFMAN_EMIT_SYMBOL, 39), + (9, HUFFMAN_EMIT_SYMBOL, 39), + (23, HUFFMAN_EMIT_SYMBOL, 39), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 39), + (2, HUFFMAN_EMIT_SYMBOL, 43), + (9, HUFFMAN_EMIT_SYMBOL, 43), + (23, HUFFMAN_EMIT_SYMBOL, 43), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 43), + + # Node 80 + (3, HUFFMAN_EMIT_SYMBOL, 39), + (6, HUFFMAN_EMIT_SYMBOL, 39), + (10, HUFFMAN_EMIT_SYMBOL, 39), + (15, HUFFMAN_EMIT_SYMBOL, 39), + (24, HUFFMAN_EMIT_SYMBOL, 39), + (31, HUFFMAN_EMIT_SYMBOL, 39), + (41, HUFFMAN_EMIT_SYMBOL, 39), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 39), + (3, HUFFMAN_EMIT_SYMBOL, 43), + (6, HUFFMAN_EMIT_SYMBOL, 43), + (10, HUFFMAN_EMIT_SYMBOL, 43), + (15, HUFFMAN_EMIT_SYMBOL, 43), + (24, HUFFMAN_EMIT_SYMBOL, 43), + (31, HUFFMAN_EMIT_SYMBOL, 43), + (41, HUFFMAN_EMIT_SYMBOL, 43), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 43), + + # Node 81 + (2, HUFFMAN_EMIT_SYMBOL, 124), + (9, HUFFMAN_EMIT_SYMBOL, 124), + (23, HUFFMAN_EMIT_SYMBOL, 124), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 124), + (1, 
HUFFMAN_EMIT_SYMBOL, 35), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 35), + (1, HUFFMAN_EMIT_SYMBOL, 62), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 62), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 0), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 36), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 64), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 91), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 93), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 126), + (91, 0, 0), + (92, 0, 0), + + # Node 82 + (3, HUFFMAN_EMIT_SYMBOL, 124), + (6, HUFFMAN_EMIT_SYMBOL, 124), + (10, HUFFMAN_EMIT_SYMBOL, 124), + (15, HUFFMAN_EMIT_SYMBOL, 124), + (24, HUFFMAN_EMIT_SYMBOL, 124), + (31, HUFFMAN_EMIT_SYMBOL, 124), + (41, HUFFMAN_EMIT_SYMBOL, 124), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 124), + (2, HUFFMAN_EMIT_SYMBOL, 35), + (9, HUFFMAN_EMIT_SYMBOL, 35), + (23, HUFFMAN_EMIT_SYMBOL, 35), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 35), + (2, HUFFMAN_EMIT_SYMBOL, 62), + (9, HUFFMAN_EMIT_SYMBOL, 62), + (23, HUFFMAN_EMIT_SYMBOL, 62), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 62), + + # Node 83 + (3, HUFFMAN_EMIT_SYMBOL, 35), + (6, HUFFMAN_EMIT_SYMBOL, 35), + (10, HUFFMAN_EMIT_SYMBOL, 35), + (15, HUFFMAN_EMIT_SYMBOL, 35), + (24, HUFFMAN_EMIT_SYMBOL, 35), + (31, HUFFMAN_EMIT_SYMBOL, 35), + (41, HUFFMAN_EMIT_SYMBOL, 35), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 35), + (3, HUFFMAN_EMIT_SYMBOL, 62), + (6, HUFFMAN_EMIT_SYMBOL, 62), + (10, HUFFMAN_EMIT_SYMBOL, 62), + (15, HUFFMAN_EMIT_SYMBOL, 62), + (24, HUFFMAN_EMIT_SYMBOL, 62), + (31, HUFFMAN_EMIT_SYMBOL, 62), + (41, HUFFMAN_EMIT_SYMBOL, 62), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 62), + + # Node 84 + (1, HUFFMAN_EMIT_SYMBOL, 0), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 0), + (1, HUFFMAN_EMIT_SYMBOL, 36), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 36), + (1, HUFFMAN_EMIT_SYMBOL, 64), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 64), + (1, HUFFMAN_EMIT_SYMBOL, 91), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 91), + (1, HUFFMAN_EMIT_SYMBOL, 93), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 93), + (1, HUFFMAN_EMIT_SYMBOL, 126), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 126), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 94), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 125), + (93, 0, 0), + (94, 0, 0), + + # Node 85 + (2, HUFFMAN_EMIT_SYMBOL, 0), + (9, HUFFMAN_EMIT_SYMBOL, 0), + (23, HUFFMAN_EMIT_SYMBOL, 0), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 0), + (2, HUFFMAN_EMIT_SYMBOL, 36), + (9, HUFFMAN_EMIT_SYMBOL, 36), + (23, HUFFMAN_EMIT_SYMBOL, 36), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 36), + (2, HUFFMAN_EMIT_SYMBOL, 64), + (9, HUFFMAN_EMIT_SYMBOL, 64), + (23, HUFFMAN_EMIT_SYMBOL, 64), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 64), + (2, HUFFMAN_EMIT_SYMBOL, 91), + (9, HUFFMAN_EMIT_SYMBOL, 91), + (23, HUFFMAN_EMIT_SYMBOL, 91), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 91), + + # Node 86 + (3, HUFFMAN_EMIT_SYMBOL, 0), + (6, HUFFMAN_EMIT_SYMBOL, 0), + (10, HUFFMAN_EMIT_SYMBOL, 0), + (15, HUFFMAN_EMIT_SYMBOL, 0), + (24, HUFFMAN_EMIT_SYMBOL, 0), + (31, HUFFMAN_EMIT_SYMBOL, 0), + (41, HUFFMAN_EMIT_SYMBOL, 0), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 0), + (3, HUFFMAN_EMIT_SYMBOL, 36), + (6, HUFFMAN_EMIT_SYMBOL, 36), + (10, HUFFMAN_EMIT_SYMBOL, 36), + (15, HUFFMAN_EMIT_SYMBOL, 36), + (24, HUFFMAN_EMIT_SYMBOL, 36), + (31, HUFFMAN_EMIT_SYMBOL, 36), + (41, HUFFMAN_EMIT_SYMBOL, 36), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 36), + + # Node 87 + (3, HUFFMAN_EMIT_SYMBOL, 64), + (6, 
HUFFMAN_EMIT_SYMBOL, 64), + (10, HUFFMAN_EMIT_SYMBOL, 64), + (15, HUFFMAN_EMIT_SYMBOL, 64), + (24, HUFFMAN_EMIT_SYMBOL, 64), + (31, HUFFMAN_EMIT_SYMBOL, 64), + (41, HUFFMAN_EMIT_SYMBOL, 64), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 64), + (3, HUFFMAN_EMIT_SYMBOL, 91), + (6, HUFFMAN_EMIT_SYMBOL, 91), + (10, HUFFMAN_EMIT_SYMBOL, 91), + (15, HUFFMAN_EMIT_SYMBOL, 91), + (24, HUFFMAN_EMIT_SYMBOL, 91), + (31, HUFFMAN_EMIT_SYMBOL, 91), + (41, HUFFMAN_EMIT_SYMBOL, 91), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 91), + + # Node 88 + (2, HUFFMAN_EMIT_SYMBOL, 93), + (9, HUFFMAN_EMIT_SYMBOL, 93), + (23, HUFFMAN_EMIT_SYMBOL, 93), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 93), + (2, HUFFMAN_EMIT_SYMBOL, 126), + (9, HUFFMAN_EMIT_SYMBOL, 126), + (23, HUFFMAN_EMIT_SYMBOL, 126), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 126), + (1, HUFFMAN_EMIT_SYMBOL, 94), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 94), + (1, HUFFMAN_EMIT_SYMBOL, 125), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 125), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 60), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 96), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 123), + (95, 0, 0), + + # Node 89 + (3, HUFFMAN_EMIT_SYMBOL, 93), + (6, HUFFMAN_EMIT_SYMBOL, 93), + (10, HUFFMAN_EMIT_SYMBOL, 93), + (15, HUFFMAN_EMIT_SYMBOL, 93), + (24, HUFFMAN_EMIT_SYMBOL, 93), + (31, HUFFMAN_EMIT_SYMBOL, 93), + (41, HUFFMAN_EMIT_SYMBOL, 93), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 93), + (3, HUFFMAN_EMIT_SYMBOL, 126), + (6, HUFFMAN_EMIT_SYMBOL, 126), + (10, HUFFMAN_EMIT_SYMBOL, 126), + (15, HUFFMAN_EMIT_SYMBOL, 126), + (24, HUFFMAN_EMIT_SYMBOL, 126), + (31, HUFFMAN_EMIT_SYMBOL, 126), + (41, HUFFMAN_EMIT_SYMBOL, 126), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 126), + + # Node 90 + (2, HUFFMAN_EMIT_SYMBOL, 94), + (9, HUFFMAN_EMIT_SYMBOL, 94), + (23, HUFFMAN_EMIT_SYMBOL, 94), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 94), + (2, HUFFMAN_EMIT_SYMBOL, 125), + (9, HUFFMAN_EMIT_SYMBOL, 125), + (23, HUFFMAN_EMIT_SYMBOL, 125), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 125), + (1, HUFFMAN_EMIT_SYMBOL, 60), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 60), + (1, HUFFMAN_EMIT_SYMBOL, 96), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 96), + (1, HUFFMAN_EMIT_SYMBOL, 123), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 123), + (96, 0, 0), + (110, 0, 0), + + # Node 91 + (3, HUFFMAN_EMIT_SYMBOL, 94), + (6, HUFFMAN_EMIT_SYMBOL, 94), + (10, HUFFMAN_EMIT_SYMBOL, 94), + (15, HUFFMAN_EMIT_SYMBOL, 94), + (24, HUFFMAN_EMIT_SYMBOL, 94), + (31, HUFFMAN_EMIT_SYMBOL, 94), + (41, HUFFMAN_EMIT_SYMBOL, 94), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 94), + (3, HUFFMAN_EMIT_SYMBOL, 125), + (6, HUFFMAN_EMIT_SYMBOL, 125), + (10, HUFFMAN_EMIT_SYMBOL, 125), + (15, HUFFMAN_EMIT_SYMBOL, 125), + (24, HUFFMAN_EMIT_SYMBOL, 125), + (31, HUFFMAN_EMIT_SYMBOL, 125), + (41, HUFFMAN_EMIT_SYMBOL, 125), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 125), + + # Node 92 + (2, HUFFMAN_EMIT_SYMBOL, 60), + (9, HUFFMAN_EMIT_SYMBOL, 60), + (23, HUFFMAN_EMIT_SYMBOL, 60), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 60), + (2, HUFFMAN_EMIT_SYMBOL, 96), + (9, HUFFMAN_EMIT_SYMBOL, 96), + (23, HUFFMAN_EMIT_SYMBOL, 96), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 96), + (2, HUFFMAN_EMIT_SYMBOL, 123), + (9, HUFFMAN_EMIT_SYMBOL, 123), + (23, HUFFMAN_EMIT_SYMBOL, 123), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 123), + (97, 0, 0), + (101, 0, 0), + (111, 0, 0), + (133, 0, 0), + + # Node 93 + (3, HUFFMAN_EMIT_SYMBOL, 60), + (6, HUFFMAN_EMIT_SYMBOL, 60), + (10, 
HUFFMAN_EMIT_SYMBOL, 60), + (15, HUFFMAN_EMIT_SYMBOL, 60), + (24, HUFFMAN_EMIT_SYMBOL, 60), + (31, HUFFMAN_EMIT_SYMBOL, 60), + (41, HUFFMAN_EMIT_SYMBOL, 60), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 60), + (3, HUFFMAN_EMIT_SYMBOL, 96), + (6, HUFFMAN_EMIT_SYMBOL, 96), + (10, HUFFMAN_EMIT_SYMBOL, 96), + (15, HUFFMAN_EMIT_SYMBOL, 96), + (24, HUFFMAN_EMIT_SYMBOL, 96), + (31, HUFFMAN_EMIT_SYMBOL, 96), + (41, HUFFMAN_EMIT_SYMBOL, 96), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 96), + + # Node 94 + (3, HUFFMAN_EMIT_SYMBOL, 123), + (6, HUFFMAN_EMIT_SYMBOL, 123), + (10, HUFFMAN_EMIT_SYMBOL, 123), + (15, HUFFMAN_EMIT_SYMBOL, 123), + (24, HUFFMAN_EMIT_SYMBOL, 123), + (31, HUFFMAN_EMIT_SYMBOL, 123), + (41, HUFFMAN_EMIT_SYMBOL, 123), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 123), + (98, 0, 0), + (99, 0, 0), + (102, 0, 0), + (105, 0, 0), + (112, 0, 0), + (119, 0, 0), + (134, 0, 0), + (153, 0, 0), + + # Node 95 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 92), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 195), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 208), + (100, 0, 0), + (103, 0, 0), + (104, 0, 0), + (106, 0, 0), + (107, 0, 0), + (113, 0, 0), + (116, 0, 0), + (120, 0, 0), + (126, 0, 0), + (135, 0, 0), + (142, 0, 0), + (154, 0, 0), + (169, 0, 0), + + # Node 96 + (1, HUFFMAN_EMIT_SYMBOL, 92), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 92), + (1, HUFFMAN_EMIT_SYMBOL, 195), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 195), + (1, HUFFMAN_EMIT_SYMBOL, 208), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 208), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 128), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 130), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 131), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 162), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 184), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 194), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 224), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 226), + (108, 0, 0), + (109, 0, 0), + + # Node 97 + (2, HUFFMAN_EMIT_SYMBOL, 92), + (9, HUFFMAN_EMIT_SYMBOL, 92), + (23, HUFFMAN_EMIT_SYMBOL, 92), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 92), + (2, HUFFMAN_EMIT_SYMBOL, 195), + (9, HUFFMAN_EMIT_SYMBOL, 195), + (23, HUFFMAN_EMIT_SYMBOL, 195), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 195), + (2, HUFFMAN_EMIT_SYMBOL, 208), + (9, HUFFMAN_EMIT_SYMBOL, 208), + (23, HUFFMAN_EMIT_SYMBOL, 208), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 208), + (1, HUFFMAN_EMIT_SYMBOL, 128), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 128), + (1, HUFFMAN_EMIT_SYMBOL, 130), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 130), + + # Node 98 + (3, HUFFMAN_EMIT_SYMBOL, 92), + (6, HUFFMAN_EMIT_SYMBOL, 92), + (10, HUFFMAN_EMIT_SYMBOL, 92), + (15, HUFFMAN_EMIT_SYMBOL, 92), + (24, HUFFMAN_EMIT_SYMBOL, 92), + (31, HUFFMAN_EMIT_SYMBOL, 92), + (41, HUFFMAN_EMIT_SYMBOL, 92), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 92), + (3, HUFFMAN_EMIT_SYMBOL, 195), + (6, HUFFMAN_EMIT_SYMBOL, 195), + (10, HUFFMAN_EMIT_SYMBOL, 195), + (15, HUFFMAN_EMIT_SYMBOL, 195), + (24, HUFFMAN_EMIT_SYMBOL, 195), + (31, HUFFMAN_EMIT_SYMBOL, 195), + (41, HUFFMAN_EMIT_SYMBOL, 195), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 195), + + # Node 99 + (3, HUFFMAN_EMIT_SYMBOL, 208), + (6, HUFFMAN_EMIT_SYMBOL, 208), + (10, HUFFMAN_EMIT_SYMBOL, 208), + (15, HUFFMAN_EMIT_SYMBOL, 208), + (24, HUFFMAN_EMIT_SYMBOL, 208), + (31, HUFFMAN_EMIT_SYMBOL, 208), + (41, HUFFMAN_EMIT_SYMBOL, 208), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 208), + (2, HUFFMAN_EMIT_SYMBOL, 128), + 
(9, HUFFMAN_EMIT_SYMBOL, 128), + (23, HUFFMAN_EMIT_SYMBOL, 128), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 128), + (2, HUFFMAN_EMIT_SYMBOL, 130), + (9, HUFFMAN_EMIT_SYMBOL, 130), + (23, HUFFMAN_EMIT_SYMBOL, 130), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 130), + + # Node 100 + (3, HUFFMAN_EMIT_SYMBOL, 128), + (6, HUFFMAN_EMIT_SYMBOL, 128), + (10, HUFFMAN_EMIT_SYMBOL, 128), + (15, HUFFMAN_EMIT_SYMBOL, 128), + (24, HUFFMAN_EMIT_SYMBOL, 128), + (31, HUFFMAN_EMIT_SYMBOL, 128), + (41, HUFFMAN_EMIT_SYMBOL, 128), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 128), + (3, HUFFMAN_EMIT_SYMBOL, 130), + (6, HUFFMAN_EMIT_SYMBOL, 130), + (10, HUFFMAN_EMIT_SYMBOL, 130), + (15, HUFFMAN_EMIT_SYMBOL, 130), + (24, HUFFMAN_EMIT_SYMBOL, 130), + (31, HUFFMAN_EMIT_SYMBOL, 130), + (41, HUFFMAN_EMIT_SYMBOL, 130), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 130), + + # Node 101 + (1, HUFFMAN_EMIT_SYMBOL, 131), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 131), + (1, HUFFMAN_EMIT_SYMBOL, 162), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 162), + (1, HUFFMAN_EMIT_SYMBOL, 184), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 184), + (1, HUFFMAN_EMIT_SYMBOL, 194), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 194), + (1, HUFFMAN_EMIT_SYMBOL, 224), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 224), + (1, HUFFMAN_EMIT_SYMBOL, 226), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 226), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 153), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 161), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 167), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 172), + + # Node 102 + (2, HUFFMAN_EMIT_SYMBOL, 131), + (9, HUFFMAN_EMIT_SYMBOL, 131), + (23, HUFFMAN_EMIT_SYMBOL, 131), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 131), + (2, HUFFMAN_EMIT_SYMBOL, 162), + (9, HUFFMAN_EMIT_SYMBOL, 162), + (23, HUFFMAN_EMIT_SYMBOL, 162), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 162), + (2, HUFFMAN_EMIT_SYMBOL, 184), + (9, HUFFMAN_EMIT_SYMBOL, 184), + (23, HUFFMAN_EMIT_SYMBOL, 184), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 184), + (2, HUFFMAN_EMIT_SYMBOL, 194), + (9, HUFFMAN_EMIT_SYMBOL, 194), + (23, HUFFMAN_EMIT_SYMBOL, 194), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 194), + + # Node 103 + (3, HUFFMAN_EMIT_SYMBOL, 131), + (6, HUFFMAN_EMIT_SYMBOL, 131), + (10, HUFFMAN_EMIT_SYMBOL, 131), + (15, HUFFMAN_EMIT_SYMBOL, 131), + (24, HUFFMAN_EMIT_SYMBOL, 131), + (31, HUFFMAN_EMIT_SYMBOL, 131), + (41, HUFFMAN_EMIT_SYMBOL, 131), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 131), + (3, HUFFMAN_EMIT_SYMBOL, 162), + (6, HUFFMAN_EMIT_SYMBOL, 162), + (10, HUFFMAN_EMIT_SYMBOL, 162), + (15, HUFFMAN_EMIT_SYMBOL, 162), + (24, HUFFMAN_EMIT_SYMBOL, 162), + (31, HUFFMAN_EMIT_SYMBOL, 162), + (41, HUFFMAN_EMIT_SYMBOL, 162), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 162), + + # Node 104 + (3, HUFFMAN_EMIT_SYMBOL, 184), + (6, HUFFMAN_EMIT_SYMBOL, 184), + (10, HUFFMAN_EMIT_SYMBOL, 184), + (15, HUFFMAN_EMIT_SYMBOL, 184), + (24, HUFFMAN_EMIT_SYMBOL, 184), + (31, HUFFMAN_EMIT_SYMBOL, 184), + (41, HUFFMAN_EMIT_SYMBOL, 184), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 184), + (3, HUFFMAN_EMIT_SYMBOL, 194), + (6, HUFFMAN_EMIT_SYMBOL, 194), + (10, HUFFMAN_EMIT_SYMBOL, 194), + (15, HUFFMAN_EMIT_SYMBOL, 194), + (24, HUFFMAN_EMIT_SYMBOL, 194), + (31, HUFFMAN_EMIT_SYMBOL, 194), + (41, HUFFMAN_EMIT_SYMBOL, 194), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 194), + + # Node 105 + (2, HUFFMAN_EMIT_SYMBOL, 224), + (9, HUFFMAN_EMIT_SYMBOL, 224), + (23, HUFFMAN_EMIT_SYMBOL, 224), + (40, HUFFMAN_COMPLETE 
| HUFFMAN_EMIT_SYMBOL, 224), + (2, HUFFMAN_EMIT_SYMBOL, 226), + (9, HUFFMAN_EMIT_SYMBOL, 226), + (23, HUFFMAN_EMIT_SYMBOL, 226), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 226), + (1, HUFFMAN_EMIT_SYMBOL, 153), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 153), + (1, HUFFMAN_EMIT_SYMBOL, 161), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 161), + (1, HUFFMAN_EMIT_SYMBOL, 167), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 167), + (1, HUFFMAN_EMIT_SYMBOL, 172), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 172), + + # Node 106 + (3, HUFFMAN_EMIT_SYMBOL, 224), + (6, HUFFMAN_EMIT_SYMBOL, 224), + (10, HUFFMAN_EMIT_SYMBOL, 224), + (15, HUFFMAN_EMIT_SYMBOL, 224), + (24, HUFFMAN_EMIT_SYMBOL, 224), + (31, HUFFMAN_EMIT_SYMBOL, 224), + (41, HUFFMAN_EMIT_SYMBOL, 224), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 224), + (3, HUFFMAN_EMIT_SYMBOL, 226), + (6, HUFFMAN_EMIT_SYMBOL, 226), + (10, HUFFMAN_EMIT_SYMBOL, 226), + (15, HUFFMAN_EMIT_SYMBOL, 226), + (24, HUFFMAN_EMIT_SYMBOL, 226), + (31, HUFFMAN_EMIT_SYMBOL, 226), + (41, HUFFMAN_EMIT_SYMBOL, 226), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 226), + + # Node 107 + (2, HUFFMAN_EMIT_SYMBOL, 153), + (9, HUFFMAN_EMIT_SYMBOL, 153), + (23, HUFFMAN_EMIT_SYMBOL, 153), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 153), + (2, HUFFMAN_EMIT_SYMBOL, 161), + (9, HUFFMAN_EMIT_SYMBOL, 161), + (23, HUFFMAN_EMIT_SYMBOL, 161), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 161), + (2, HUFFMAN_EMIT_SYMBOL, 167), + (9, HUFFMAN_EMIT_SYMBOL, 167), + (23, HUFFMAN_EMIT_SYMBOL, 167), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 167), + (2, HUFFMAN_EMIT_SYMBOL, 172), + (9, HUFFMAN_EMIT_SYMBOL, 172), + (23, HUFFMAN_EMIT_SYMBOL, 172), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 172), + + # Node 108 + (3, HUFFMAN_EMIT_SYMBOL, 153), + (6, HUFFMAN_EMIT_SYMBOL, 153), + (10, HUFFMAN_EMIT_SYMBOL, 153), + (15, HUFFMAN_EMIT_SYMBOL, 153), + (24, HUFFMAN_EMIT_SYMBOL, 153), + (31, HUFFMAN_EMIT_SYMBOL, 153), + (41, HUFFMAN_EMIT_SYMBOL, 153), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 153), + (3, HUFFMAN_EMIT_SYMBOL, 161), + (6, HUFFMAN_EMIT_SYMBOL, 161), + (10, HUFFMAN_EMIT_SYMBOL, 161), + (15, HUFFMAN_EMIT_SYMBOL, 161), + (24, HUFFMAN_EMIT_SYMBOL, 161), + (31, HUFFMAN_EMIT_SYMBOL, 161), + (41, HUFFMAN_EMIT_SYMBOL, 161), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 161), + + # Node 109 + (3, HUFFMAN_EMIT_SYMBOL, 167), + (6, HUFFMAN_EMIT_SYMBOL, 167), + (10, HUFFMAN_EMIT_SYMBOL, 167), + (15, HUFFMAN_EMIT_SYMBOL, 167), + (24, HUFFMAN_EMIT_SYMBOL, 167), + (31, HUFFMAN_EMIT_SYMBOL, 167), + (41, HUFFMAN_EMIT_SYMBOL, 167), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 167), + (3, HUFFMAN_EMIT_SYMBOL, 172), + (6, HUFFMAN_EMIT_SYMBOL, 172), + (10, HUFFMAN_EMIT_SYMBOL, 172), + (15, HUFFMAN_EMIT_SYMBOL, 172), + (24, HUFFMAN_EMIT_SYMBOL, 172), + (31, HUFFMAN_EMIT_SYMBOL, 172), + (41, HUFFMAN_EMIT_SYMBOL, 172), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 172), + + # Node 110 + (114, 0, 0), + (115, 0, 0), + (117, 0, 0), + (118, 0, 0), + (121, 0, 0), + (123, 0, 0), + (127, 0, 0), + (130, 0, 0), + (136, 0, 0), + (139, 0, 0), + (143, 0, 0), + (146, 0, 0), + (155, 0, 0), + (162, 0, 0), + (170, 0, 0), + (180, 0, 0), + + # Node 111 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 176), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 177), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 179), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 209), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 216), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 217), + (0, HUFFMAN_COMPLETE | 
HUFFMAN_EMIT_SYMBOL, 227), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 229), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 230), + (122, 0, 0), + (124, 0, 0), + (125, 0, 0), + (128, 0, 0), + (129, 0, 0), + (131, 0, 0), + (132, 0, 0), + + # Node 112 + (1, HUFFMAN_EMIT_SYMBOL, 176), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 176), + (1, HUFFMAN_EMIT_SYMBOL, 177), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 177), + (1, HUFFMAN_EMIT_SYMBOL, 179), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 179), + (1, HUFFMAN_EMIT_SYMBOL, 209), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 209), + (1, HUFFMAN_EMIT_SYMBOL, 216), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 216), + (1, HUFFMAN_EMIT_SYMBOL, 217), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 217), + (1, HUFFMAN_EMIT_SYMBOL, 227), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 227), + (1, HUFFMAN_EMIT_SYMBOL, 229), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 229), + + # Node 113 + (2, HUFFMAN_EMIT_SYMBOL, 176), + (9, HUFFMAN_EMIT_SYMBOL, 176), + (23, HUFFMAN_EMIT_SYMBOL, 176), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 176), + (2, HUFFMAN_EMIT_SYMBOL, 177), + (9, HUFFMAN_EMIT_SYMBOL, 177), + (23, HUFFMAN_EMIT_SYMBOL, 177), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 177), + (2, HUFFMAN_EMIT_SYMBOL, 179), + (9, HUFFMAN_EMIT_SYMBOL, 179), + (23, HUFFMAN_EMIT_SYMBOL, 179), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 179), + (2, HUFFMAN_EMIT_SYMBOL, 209), + (9, HUFFMAN_EMIT_SYMBOL, 209), + (23, HUFFMAN_EMIT_SYMBOL, 209), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 209), + + # Node 114 + (3, HUFFMAN_EMIT_SYMBOL, 176), + (6, HUFFMAN_EMIT_SYMBOL, 176), + (10, HUFFMAN_EMIT_SYMBOL, 176), + (15, HUFFMAN_EMIT_SYMBOL, 176), + (24, HUFFMAN_EMIT_SYMBOL, 176), + (31, HUFFMAN_EMIT_SYMBOL, 176), + (41, HUFFMAN_EMIT_SYMBOL, 176), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 176), + (3, HUFFMAN_EMIT_SYMBOL, 177), + (6, HUFFMAN_EMIT_SYMBOL, 177), + (10, HUFFMAN_EMIT_SYMBOL, 177), + (15, HUFFMAN_EMIT_SYMBOL, 177), + (24, HUFFMAN_EMIT_SYMBOL, 177), + (31, HUFFMAN_EMIT_SYMBOL, 177), + (41, HUFFMAN_EMIT_SYMBOL, 177), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 177), + + # Node 115 + (3, HUFFMAN_EMIT_SYMBOL, 179), + (6, HUFFMAN_EMIT_SYMBOL, 179), + (10, HUFFMAN_EMIT_SYMBOL, 179), + (15, HUFFMAN_EMIT_SYMBOL, 179), + (24, HUFFMAN_EMIT_SYMBOL, 179), + (31, HUFFMAN_EMIT_SYMBOL, 179), + (41, HUFFMAN_EMIT_SYMBOL, 179), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 179), + (3, HUFFMAN_EMIT_SYMBOL, 209), + (6, HUFFMAN_EMIT_SYMBOL, 209), + (10, HUFFMAN_EMIT_SYMBOL, 209), + (15, HUFFMAN_EMIT_SYMBOL, 209), + (24, HUFFMAN_EMIT_SYMBOL, 209), + (31, HUFFMAN_EMIT_SYMBOL, 209), + (41, HUFFMAN_EMIT_SYMBOL, 209), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 209), + + # Node 116 + (2, HUFFMAN_EMIT_SYMBOL, 216), + (9, HUFFMAN_EMIT_SYMBOL, 216), + (23, HUFFMAN_EMIT_SYMBOL, 216), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 216), + (2, HUFFMAN_EMIT_SYMBOL, 217), + (9, HUFFMAN_EMIT_SYMBOL, 217), + (23, HUFFMAN_EMIT_SYMBOL, 217), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 217), + (2, HUFFMAN_EMIT_SYMBOL, 227), + (9, HUFFMAN_EMIT_SYMBOL, 227), + (23, HUFFMAN_EMIT_SYMBOL, 227), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 227), + (2, HUFFMAN_EMIT_SYMBOL, 229), + (9, HUFFMAN_EMIT_SYMBOL, 229), + (23, HUFFMAN_EMIT_SYMBOL, 229), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 229), + + # Node 117 + (3, HUFFMAN_EMIT_SYMBOL, 216), + (6, HUFFMAN_EMIT_SYMBOL, 216), + (10, HUFFMAN_EMIT_SYMBOL, 216), + (15, HUFFMAN_EMIT_SYMBOL, 216), + (24, 
HUFFMAN_EMIT_SYMBOL, 216), + (31, HUFFMAN_EMIT_SYMBOL, 216), + (41, HUFFMAN_EMIT_SYMBOL, 216), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 216), + (3, HUFFMAN_EMIT_SYMBOL, 217), + (6, HUFFMAN_EMIT_SYMBOL, 217), + (10, HUFFMAN_EMIT_SYMBOL, 217), + (15, HUFFMAN_EMIT_SYMBOL, 217), + (24, HUFFMAN_EMIT_SYMBOL, 217), + (31, HUFFMAN_EMIT_SYMBOL, 217), + (41, HUFFMAN_EMIT_SYMBOL, 217), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 217), + + # Node 118 + (3, HUFFMAN_EMIT_SYMBOL, 227), + (6, HUFFMAN_EMIT_SYMBOL, 227), + (10, HUFFMAN_EMIT_SYMBOL, 227), + (15, HUFFMAN_EMIT_SYMBOL, 227), + (24, HUFFMAN_EMIT_SYMBOL, 227), + (31, HUFFMAN_EMIT_SYMBOL, 227), + (41, HUFFMAN_EMIT_SYMBOL, 227), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 227), + (3, HUFFMAN_EMIT_SYMBOL, 229), + (6, HUFFMAN_EMIT_SYMBOL, 229), + (10, HUFFMAN_EMIT_SYMBOL, 229), + (15, HUFFMAN_EMIT_SYMBOL, 229), + (24, HUFFMAN_EMIT_SYMBOL, 229), + (31, HUFFMAN_EMIT_SYMBOL, 229), + (41, HUFFMAN_EMIT_SYMBOL, 229), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 229), + + # Node 119 + (1, HUFFMAN_EMIT_SYMBOL, 230), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 230), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 129), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 132), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 133), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 134), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 136), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 146), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 154), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 156), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 160), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 163), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 164), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 169), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 170), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 173), + + # Node 120 + (2, HUFFMAN_EMIT_SYMBOL, 230), + (9, HUFFMAN_EMIT_SYMBOL, 230), + (23, HUFFMAN_EMIT_SYMBOL, 230), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 230), + (1, HUFFMAN_EMIT_SYMBOL, 129), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 129), + (1, HUFFMAN_EMIT_SYMBOL, 132), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 132), + (1, HUFFMAN_EMIT_SYMBOL, 133), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 133), + (1, HUFFMAN_EMIT_SYMBOL, 134), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 134), + (1, HUFFMAN_EMIT_SYMBOL, 136), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 136), + (1, HUFFMAN_EMIT_SYMBOL, 146), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 146), + + # Node 121 + (3, HUFFMAN_EMIT_SYMBOL, 230), + (6, HUFFMAN_EMIT_SYMBOL, 230), + (10, HUFFMAN_EMIT_SYMBOL, 230), + (15, HUFFMAN_EMIT_SYMBOL, 230), + (24, HUFFMAN_EMIT_SYMBOL, 230), + (31, HUFFMAN_EMIT_SYMBOL, 230), + (41, HUFFMAN_EMIT_SYMBOL, 230), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 230), + (2, HUFFMAN_EMIT_SYMBOL, 129), + (9, HUFFMAN_EMIT_SYMBOL, 129), + (23, HUFFMAN_EMIT_SYMBOL, 129), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 129), + (2, HUFFMAN_EMIT_SYMBOL, 132), + (9, HUFFMAN_EMIT_SYMBOL, 132), + (23, HUFFMAN_EMIT_SYMBOL, 132), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 132), + + # Node 122 + (3, HUFFMAN_EMIT_SYMBOL, 129), + (6, HUFFMAN_EMIT_SYMBOL, 129), + (10, HUFFMAN_EMIT_SYMBOL, 129), + (15, HUFFMAN_EMIT_SYMBOL, 129), + (24, HUFFMAN_EMIT_SYMBOL, 129), + (31, HUFFMAN_EMIT_SYMBOL, 129), + (41, HUFFMAN_EMIT_SYMBOL, 129), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 129), + (3, HUFFMAN_EMIT_SYMBOL, 132), + (6, HUFFMAN_EMIT_SYMBOL, 132), + (10, HUFFMAN_EMIT_SYMBOL, 
132), + (15, HUFFMAN_EMIT_SYMBOL, 132), + (24, HUFFMAN_EMIT_SYMBOL, 132), + (31, HUFFMAN_EMIT_SYMBOL, 132), + (41, HUFFMAN_EMIT_SYMBOL, 132), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 132), + + # Node 123 + (2, HUFFMAN_EMIT_SYMBOL, 133), + (9, HUFFMAN_EMIT_SYMBOL, 133), + (23, HUFFMAN_EMIT_SYMBOL, 133), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 133), + (2, HUFFMAN_EMIT_SYMBOL, 134), + (9, HUFFMAN_EMIT_SYMBOL, 134), + (23, HUFFMAN_EMIT_SYMBOL, 134), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 134), + (2, HUFFMAN_EMIT_SYMBOL, 136), + (9, HUFFMAN_EMIT_SYMBOL, 136), + (23, HUFFMAN_EMIT_SYMBOL, 136), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 136), + (2, HUFFMAN_EMIT_SYMBOL, 146), + (9, HUFFMAN_EMIT_SYMBOL, 146), + (23, HUFFMAN_EMIT_SYMBOL, 146), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 146), + + # Node 124 + (3, HUFFMAN_EMIT_SYMBOL, 133), + (6, HUFFMAN_EMIT_SYMBOL, 133), + (10, HUFFMAN_EMIT_SYMBOL, 133), + (15, HUFFMAN_EMIT_SYMBOL, 133), + (24, HUFFMAN_EMIT_SYMBOL, 133), + (31, HUFFMAN_EMIT_SYMBOL, 133), + (41, HUFFMAN_EMIT_SYMBOL, 133), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 133), + (3, HUFFMAN_EMIT_SYMBOL, 134), + (6, HUFFMAN_EMIT_SYMBOL, 134), + (10, HUFFMAN_EMIT_SYMBOL, 134), + (15, HUFFMAN_EMIT_SYMBOL, 134), + (24, HUFFMAN_EMIT_SYMBOL, 134), + (31, HUFFMAN_EMIT_SYMBOL, 134), + (41, HUFFMAN_EMIT_SYMBOL, 134), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 134), + + # Node 125 + (3, HUFFMAN_EMIT_SYMBOL, 136), + (6, HUFFMAN_EMIT_SYMBOL, 136), + (10, HUFFMAN_EMIT_SYMBOL, 136), + (15, HUFFMAN_EMIT_SYMBOL, 136), + (24, HUFFMAN_EMIT_SYMBOL, 136), + (31, HUFFMAN_EMIT_SYMBOL, 136), + (41, HUFFMAN_EMIT_SYMBOL, 136), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 136), + (3, HUFFMAN_EMIT_SYMBOL, 146), + (6, HUFFMAN_EMIT_SYMBOL, 146), + (10, HUFFMAN_EMIT_SYMBOL, 146), + (15, HUFFMAN_EMIT_SYMBOL, 146), + (24, HUFFMAN_EMIT_SYMBOL, 146), + (31, HUFFMAN_EMIT_SYMBOL, 146), + (41, HUFFMAN_EMIT_SYMBOL, 146), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 146), + + # Node 126 + (1, HUFFMAN_EMIT_SYMBOL, 154), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 154), + (1, HUFFMAN_EMIT_SYMBOL, 156), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 156), + (1, HUFFMAN_EMIT_SYMBOL, 160), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 160), + (1, HUFFMAN_EMIT_SYMBOL, 163), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 163), + (1, HUFFMAN_EMIT_SYMBOL, 164), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 164), + (1, HUFFMAN_EMIT_SYMBOL, 169), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 169), + (1, HUFFMAN_EMIT_SYMBOL, 170), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 170), + (1, HUFFMAN_EMIT_SYMBOL, 173), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 173), + + # Node 127 + (2, HUFFMAN_EMIT_SYMBOL, 154), + (9, HUFFMAN_EMIT_SYMBOL, 154), + (23, HUFFMAN_EMIT_SYMBOL, 154), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 154), + (2, HUFFMAN_EMIT_SYMBOL, 156), + (9, HUFFMAN_EMIT_SYMBOL, 156), + (23, HUFFMAN_EMIT_SYMBOL, 156), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 156), + (2, HUFFMAN_EMIT_SYMBOL, 160), + (9, HUFFMAN_EMIT_SYMBOL, 160), + (23, HUFFMAN_EMIT_SYMBOL, 160), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 160), + (2, HUFFMAN_EMIT_SYMBOL, 163), + (9, HUFFMAN_EMIT_SYMBOL, 163), + (23, HUFFMAN_EMIT_SYMBOL, 163), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 163), + + # Node 128 + (3, HUFFMAN_EMIT_SYMBOL, 154), + (6, HUFFMAN_EMIT_SYMBOL, 154), + (10, HUFFMAN_EMIT_SYMBOL, 154), + (15, HUFFMAN_EMIT_SYMBOL, 154), + (24, HUFFMAN_EMIT_SYMBOL, 154), + (31, HUFFMAN_EMIT_SYMBOL, 154), 
+ (41, HUFFMAN_EMIT_SYMBOL, 154), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 154), + (3, HUFFMAN_EMIT_SYMBOL, 156), + (6, HUFFMAN_EMIT_SYMBOL, 156), + (10, HUFFMAN_EMIT_SYMBOL, 156), + (15, HUFFMAN_EMIT_SYMBOL, 156), + (24, HUFFMAN_EMIT_SYMBOL, 156), + (31, HUFFMAN_EMIT_SYMBOL, 156), + (41, HUFFMAN_EMIT_SYMBOL, 156), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 156), + + # Node 129 + (3, HUFFMAN_EMIT_SYMBOL, 160), + (6, HUFFMAN_EMIT_SYMBOL, 160), + (10, HUFFMAN_EMIT_SYMBOL, 160), + (15, HUFFMAN_EMIT_SYMBOL, 160), + (24, HUFFMAN_EMIT_SYMBOL, 160), + (31, HUFFMAN_EMIT_SYMBOL, 160), + (41, HUFFMAN_EMIT_SYMBOL, 160), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 160), + (3, HUFFMAN_EMIT_SYMBOL, 163), + (6, HUFFMAN_EMIT_SYMBOL, 163), + (10, HUFFMAN_EMIT_SYMBOL, 163), + (15, HUFFMAN_EMIT_SYMBOL, 163), + (24, HUFFMAN_EMIT_SYMBOL, 163), + (31, HUFFMAN_EMIT_SYMBOL, 163), + (41, HUFFMAN_EMIT_SYMBOL, 163), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 163), + + # Node 130 + (2, HUFFMAN_EMIT_SYMBOL, 164), + (9, HUFFMAN_EMIT_SYMBOL, 164), + (23, HUFFMAN_EMIT_SYMBOL, 164), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 164), + (2, HUFFMAN_EMIT_SYMBOL, 169), + (9, HUFFMAN_EMIT_SYMBOL, 169), + (23, HUFFMAN_EMIT_SYMBOL, 169), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 169), + (2, HUFFMAN_EMIT_SYMBOL, 170), + (9, HUFFMAN_EMIT_SYMBOL, 170), + (23, HUFFMAN_EMIT_SYMBOL, 170), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 170), + (2, HUFFMAN_EMIT_SYMBOL, 173), + (9, HUFFMAN_EMIT_SYMBOL, 173), + (23, HUFFMAN_EMIT_SYMBOL, 173), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 173), + + # Node 131 + (3, HUFFMAN_EMIT_SYMBOL, 164), + (6, HUFFMAN_EMIT_SYMBOL, 164), + (10, HUFFMAN_EMIT_SYMBOL, 164), + (15, HUFFMAN_EMIT_SYMBOL, 164), + (24, HUFFMAN_EMIT_SYMBOL, 164), + (31, HUFFMAN_EMIT_SYMBOL, 164), + (41, HUFFMAN_EMIT_SYMBOL, 164), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 164), + (3, HUFFMAN_EMIT_SYMBOL, 169), + (6, HUFFMAN_EMIT_SYMBOL, 169), + (10, HUFFMAN_EMIT_SYMBOL, 169), + (15, HUFFMAN_EMIT_SYMBOL, 169), + (24, HUFFMAN_EMIT_SYMBOL, 169), + (31, HUFFMAN_EMIT_SYMBOL, 169), + (41, HUFFMAN_EMIT_SYMBOL, 169), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 169), + + # Node 132 + (3, HUFFMAN_EMIT_SYMBOL, 170), + (6, HUFFMAN_EMIT_SYMBOL, 170), + (10, HUFFMAN_EMIT_SYMBOL, 170), + (15, HUFFMAN_EMIT_SYMBOL, 170), + (24, HUFFMAN_EMIT_SYMBOL, 170), + (31, HUFFMAN_EMIT_SYMBOL, 170), + (41, HUFFMAN_EMIT_SYMBOL, 170), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 170), + (3, HUFFMAN_EMIT_SYMBOL, 173), + (6, HUFFMAN_EMIT_SYMBOL, 173), + (10, HUFFMAN_EMIT_SYMBOL, 173), + (15, HUFFMAN_EMIT_SYMBOL, 173), + (24, HUFFMAN_EMIT_SYMBOL, 173), + (31, HUFFMAN_EMIT_SYMBOL, 173), + (41, HUFFMAN_EMIT_SYMBOL, 173), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 173), + + # Node 133 + (137, 0, 0), + (138, 0, 0), + (140, 0, 0), + (141, 0, 0), + (144, 0, 0), + (145, 0, 0), + (147, 0, 0), + (150, 0, 0), + (156, 0, 0), + (159, 0, 0), + (163, 0, 0), + (166, 0, 0), + (171, 0, 0), + (174, 0, 0), + (181, 0, 0), + (190, 0, 0), + + # Node 134 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 178), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 181), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 185), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 186), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 187), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 189), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 190), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 196), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 198), + (0, HUFFMAN_COMPLETE | 
HUFFMAN_EMIT_SYMBOL, 228), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 232), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 233), + (148, 0, 0), + (149, 0, 0), + (151, 0, 0), + (152, 0, 0), + + # Node 135 + (1, HUFFMAN_EMIT_SYMBOL, 178), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 178), + (1, HUFFMAN_EMIT_SYMBOL, 181), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 181), + (1, HUFFMAN_EMIT_SYMBOL, 185), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 185), + (1, HUFFMAN_EMIT_SYMBOL, 186), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 186), + (1, HUFFMAN_EMIT_SYMBOL, 187), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 187), + (1, HUFFMAN_EMIT_SYMBOL, 189), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 189), + (1, HUFFMAN_EMIT_SYMBOL, 190), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 190), + (1, HUFFMAN_EMIT_SYMBOL, 196), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 196), + + # Node 136 + (2, HUFFMAN_EMIT_SYMBOL, 178), + (9, HUFFMAN_EMIT_SYMBOL, 178), + (23, HUFFMAN_EMIT_SYMBOL, 178), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 178), + (2, HUFFMAN_EMIT_SYMBOL, 181), + (9, HUFFMAN_EMIT_SYMBOL, 181), + (23, HUFFMAN_EMIT_SYMBOL, 181), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 181), + (2, HUFFMAN_EMIT_SYMBOL, 185), + (9, HUFFMAN_EMIT_SYMBOL, 185), + (23, HUFFMAN_EMIT_SYMBOL, 185), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 185), + (2, HUFFMAN_EMIT_SYMBOL, 186), + (9, HUFFMAN_EMIT_SYMBOL, 186), + (23, HUFFMAN_EMIT_SYMBOL, 186), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 186), + + # Node 137 + (3, HUFFMAN_EMIT_SYMBOL, 178), + (6, HUFFMAN_EMIT_SYMBOL, 178), + (10, HUFFMAN_EMIT_SYMBOL, 178), + (15, HUFFMAN_EMIT_SYMBOL, 178), + (24, HUFFMAN_EMIT_SYMBOL, 178), + (31, HUFFMAN_EMIT_SYMBOL, 178), + (41, HUFFMAN_EMIT_SYMBOL, 178), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 178), + (3, HUFFMAN_EMIT_SYMBOL, 181), + (6, HUFFMAN_EMIT_SYMBOL, 181), + (10, HUFFMAN_EMIT_SYMBOL, 181), + (15, HUFFMAN_EMIT_SYMBOL, 181), + (24, HUFFMAN_EMIT_SYMBOL, 181), + (31, HUFFMAN_EMIT_SYMBOL, 181), + (41, HUFFMAN_EMIT_SYMBOL, 181), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 181), + + # Node 138 + (3, HUFFMAN_EMIT_SYMBOL, 185), + (6, HUFFMAN_EMIT_SYMBOL, 185), + (10, HUFFMAN_EMIT_SYMBOL, 185), + (15, HUFFMAN_EMIT_SYMBOL, 185), + (24, HUFFMAN_EMIT_SYMBOL, 185), + (31, HUFFMAN_EMIT_SYMBOL, 185), + (41, HUFFMAN_EMIT_SYMBOL, 185), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 185), + (3, HUFFMAN_EMIT_SYMBOL, 186), + (6, HUFFMAN_EMIT_SYMBOL, 186), + (10, HUFFMAN_EMIT_SYMBOL, 186), + (15, HUFFMAN_EMIT_SYMBOL, 186), + (24, HUFFMAN_EMIT_SYMBOL, 186), + (31, HUFFMAN_EMIT_SYMBOL, 186), + (41, HUFFMAN_EMIT_SYMBOL, 186), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 186), + + # Node 139 + (2, HUFFMAN_EMIT_SYMBOL, 187), + (9, HUFFMAN_EMIT_SYMBOL, 187), + (23, HUFFMAN_EMIT_SYMBOL, 187), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 187), + (2, HUFFMAN_EMIT_SYMBOL, 189), + (9, HUFFMAN_EMIT_SYMBOL, 189), + (23, HUFFMAN_EMIT_SYMBOL, 189), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 189), + (2, HUFFMAN_EMIT_SYMBOL, 190), + (9, HUFFMAN_EMIT_SYMBOL, 190), + (23, HUFFMAN_EMIT_SYMBOL, 190), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 190), + (2, HUFFMAN_EMIT_SYMBOL, 196), + (9, HUFFMAN_EMIT_SYMBOL, 196), + (23, HUFFMAN_EMIT_SYMBOL, 196), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 196), + + # Node 140 + (3, HUFFMAN_EMIT_SYMBOL, 187), + (6, HUFFMAN_EMIT_SYMBOL, 187), + (10, HUFFMAN_EMIT_SYMBOL, 187), + (15, HUFFMAN_EMIT_SYMBOL, 187), + (24, HUFFMAN_EMIT_SYMBOL, 187), + (31, HUFFMAN_EMIT_SYMBOL, 187), + 
(41, HUFFMAN_EMIT_SYMBOL, 187), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 187), + (3, HUFFMAN_EMIT_SYMBOL, 189), + (6, HUFFMAN_EMIT_SYMBOL, 189), + (10, HUFFMAN_EMIT_SYMBOL, 189), + (15, HUFFMAN_EMIT_SYMBOL, 189), + (24, HUFFMAN_EMIT_SYMBOL, 189), + (31, HUFFMAN_EMIT_SYMBOL, 189), + (41, HUFFMAN_EMIT_SYMBOL, 189), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 189), + + # Node 141 + (3, HUFFMAN_EMIT_SYMBOL, 190), + (6, HUFFMAN_EMIT_SYMBOL, 190), + (10, HUFFMAN_EMIT_SYMBOL, 190), + (15, HUFFMAN_EMIT_SYMBOL, 190), + (24, HUFFMAN_EMIT_SYMBOL, 190), + (31, HUFFMAN_EMIT_SYMBOL, 190), + (41, HUFFMAN_EMIT_SYMBOL, 190), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 190), + (3, HUFFMAN_EMIT_SYMBOL, 196), + (6, HUFFMAN_EMIT_SYMBOL, 196), + (10, HUFFMAN_EMIT_SYMBOL, 196), + (15, HUFFMAN_EMIT_SYMBOL, 196), + (24, HUFFMAN_EMIT_SYMBOL, 196), + (31, HUFFMAN_EMIT_SYMBOL, 196), + (41, HUFFMAN_EMIT_SYMBOL, 196), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 196), + + # Node 142 + (1, HUFFMAN_EMIT_SYMBOL, 198), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 198), + (1, HUFFMAN_EMIT_SYMBOL, 228), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 228), + (1, HUFFMAN_EMIT_SYMBOL, 232), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 232), + (1, HUFFMAN_EMIT_SYMBOL, 233), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 233), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 1), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 135), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 137), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 138), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 139), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 140), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 141), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 143), + + # Node 143 + (2, HUFFMAN_EMIT_SYMBOL, 198), + (9, HUFFMAN_EMIT_SYMBOL, 198), + (23, HUFFMAN_EMIT_SYMBOL, 198), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 198), + (2, HUFFMAN_EMIT_SYMBOL, 228), + (9, HUFFMAN_EMIT_SYMBOL, 228), + (23, HUFFMAN_EMIT_SYMBOL, 228), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 228), + (2, HUFFMAN_EMIT_SYMBOL, 232), + (9, HUFFMAN_EMIT_SYMBOL, 232), + (23, HUFFMAN_EMIT_SYMBOL, 232), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 232), + (2, HUFFMAN_EMIT_SYMBOL, 233), + (9, HUFFMAN_EMIT_SYMBOL, 233), + (23, HUFFMAN_EMIT_SYMBOL, 233), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 233), + + # Node 144 + (3, HUFFMAN_EMIT_SYMBOL, 198), + (6, HUFFMAN_EMIT_SYMBOL, 198), + (10, HUFFMAN_EMIT_SYMBOL, 198), + (15, HUFFMAN_EMIT_SYMBOL, 198), + (24, HUFFMAN_EMIT_SYMBOL, 198), + (31, HUFFMAN_EMIT_SYMBOL, 198), + (41, HUFFMAN_EMIT_SYMBOL, 198), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 198), + (3, HUFFMAN_EMIT_SYMBOL, 228), + (6, HUFFMAN_EMIT_SYMBOL, 228), + (10, HUFFMAN_EMIT_SYMBOL, 228), + (15, HUFFMAN_EMIT_SYMBOL, 228), + (24, HUFFMAN_EMIT_SYMBOL, 228), + (31, HUFFMAN_EMIT_SYMBOL, 228), + (41, HUFFMAN_EMIT_SYMBOL, 228), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 228), + + # Node 145 + (3, HUFFMAN_EMIT_SYMBOL, 232), + (6, HUFFMAN_EMIT_SYMBOL, 232), + (10, HUFFMAN_EMIT_SYMBOL, 232), + (15, HUFFMAN_EMIT_SYMBOL, 232), + (24, HUFFMAN_EMIT_SYMBOL, 232), + (31, HUFFMAN_EMIT_SYMBOL, 232), + (41, HUFFMAN_EMIT_SYMBOL, 232), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 232), + (3, HUFFMAN_EMIT_SYMBOL, 233), + (6, HUFFMAN_EMIT_SYMBOL, 233), + (10, HUFFMAN_EMIT_SYMBOL, 233), + (15, HUFFMAN_EMIT_SYMBOL, 233), + (24, HUFFMAN_EMIT_SYMBOL, 233), + (31, HUFFMAN_EMIT_SYMBOL, 233), + (41, HUFFMAN_EMIT_SYMBOL, 233), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 233), + 
+ # Node 146 + (1, HUFFMAN_EMIT_SYMBOL, 1), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 1), + (1, HUFFMAN_EMIT_SYMBOL, 135), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 135), + (1, HUFFMAN_EMIT_SYMBOL, 137), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 137), + (1, HUFFMAN_EMIT_SYMBOL, 138), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 138), + (1, HUFFMAN_EMIT_SYMBOL, 139), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 139), + (1, HUFFMAN_EMIT_SYMBOL, 140), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 140), + (1, HUFFMAN_EMIT_SYMBOL, 141), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 141), + (1, HUFFMAN_EMIT_SYMBOL, 143), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 143), + + # Node 147 + (2, HUFFMAN_EMIT_SYMBOL, 1), + (9, HUFFMAN_EMIT_SYMBOL, 1), + (23, HUFFMAN_EMIT_SYMBOL, 1), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 1), + (2, HUFFMAN_EMIT_SYMBOL, 135), + (9, HUFFMAN_EMIT_SYMBOL, 135), + (23, HUFFMAN_EMIT_SYMBOL, 135), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 135), + (2, HUFFMAN_EMIT_SYMBOL, 137), + (9, HUFFMAN_EMIT_SYMBOL, 137), + (23, HUFFMAN_EMIT_SYMBOL, 137), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 137), + (2, HUFFMAN_EMIT_SYMBOL, 138), + (9, HUFFMAN_EMIT_SYMBOL, 138), + (23, HUFFMAN_EMIT_SYMBOL, 138), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 138), + + # Node 148 + (3, HUFFMAN_EMIT_SYMBOL, 1), + (6, HUFFMAN_EMIT_SYMBOL, 1), + (10, HUFFMAN_EMIT_SYMBOL, 1), + (15, HUFFMAN_EMIT_SYMBOL, 1), + (24, HUFFMAN_EMIT_SYMBOL, 1), + (31, HUFFMAN_EMIT_SYMBOL, 1), + (41, HUFFMAN_EMIT_SYMBOL, 1), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 1), + (3, HUFFMAN_EMIT_SYMBOL, 135), + (6, HUFFMAN_EMIT_SYMBOL, 135), + (10, HUFFMAN_EMIT_SYMBOL, 135), + (15, HUFFMAN_EMIT_SYMBOL, 135), + (24, HUFFMAN_EMIT_SYMBOL, 135), + (31, HUFFMAN_EMIT_SYMBOL, 135), + (41, HUFFMAN_EMIT_SYMBOL, 135), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 135), + + # Node 149 + (3, HUFFMAN_EMIT_SYMBOL, 137), + (6, HUFFMAN_EMIT_SYMBOL, 137), + (10, HUFFMAN_EMIT_SYMBOL, 137), + (15, HUFFMAN_EMIT_SYMBOL, 137), + (24, HUFFMAN_EMIT_SYMBOL, 137), + (31, HUFFMAN_EMIT_SYMBOL, 137), + (41, HUFFMAN_EMIT_SYMBOL, 137), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 137), + (3, HUFFMAN_EMIT_SYMBOL, 138), + (6, HUFFMAN_EMIT_SYMBOL, 138), + (10, HUFFMAN_EMIT_SYMBOL, 138), + (15, HUFFMAN_EMIT_SYMBOL, 138), + (24, HUFFMAN_EMIT_SYMBOL, 138), + (31, HUFFMAN_EMIT_SYMBOL, 138), + (41, HUFFMAN_EMIT_SYMBOL, 138), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 138), + + # Node 150 + (2, HUFFMAN_EMIT_SYMBOL, 139), + (9, HUFFMAN_EMIT_SYMBOL, 139), + (23, HUFFMAN_EMIT_SYMBOL, 139), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 139), + (2, HUFFMAN_EMIT_SYMBOL, 140), + (9, HUFFMAN_EMIT_SYMBOL, 140), + (23, HUFFMAN_EMIT_SYMBOL, 140), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 140), + (2, HUFFMAN_EMIT_SYMBOL, 141), + (9, HUFFMAN_EMIT_SYMBOL, 141), + (23, HUFFMAN_EMIT_SYMBOL, 141), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 141), + (2, HUFFMAN_EMIT_SYMBOL, 143), + (9, HUFFMAN_EMIT_SYMBOL, 143), + (23, HUFFMAN_EMIT_SYMBOL, 143), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 143), + + # Node 151 + (3, HUFFMAN_EMIT_SYMBOL, 139), + (6, HUFFMAN_EMIT_SYMBOL, 139), + (10, HUFFMAN_EMIT_SYMBOL, 139), + (15, HUFFMAN_EMIT_SYMBOL, 139), + (24, HUFFMAN_EMIT_SYMBOL, 139), + (31, HUFFMAN_EMIT_SYMBOL, 139), + (41, HUFFMAN_EMIT_SYMBOL, 139), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 139), + (3, HUFFMAN_EMIT_SYMBOL, 140), + (6, HUFFMAN_EMIT_SYMBOL, 140), + (10, HUFFMAN_EMIT_SYMBOL, 140), + (15, HUFFMAN_EMIT_SYMBOL, 140), + 
(24, HUFFMAN_EMIT_SYMBOL, 140), + (31, HUFFMAN_EMIT_SYMBOL, 140), + (41, HUFFMAN_EMIT_SYMBOL, 140), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 140), + + # Node 152 + (3, HUFFMAN_EMIT_SYMBOL, 141), + (6, HUFFMAN_EMIT_SYMBOL, 141), + (10, HUFFMAN_EMIT_SYMBOL, 141), + (15, HUFFMAN_EMIT_SYMBOL, 141), + (24, HUFFMAN_EMIT_SYMBOL, 141), + (31, HUFFMAN_EMIT_SYMBOL, 141), + (41, HUFFMAN_EMIT_SYMBOL, 141), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 141), + (3, HUFFMAN_EMIT_SYMBOL, 143), + (6, HUFFMAN_EMIT_SYMBOL, 143), + (10, HUFFMAN_EMIT_SYMBOL, 143), + (15, HUFFMAN_EMIT_SYMBOL, 143), + (24, HUFFMAN_EMIT_SYMBOL, 143), + (31, HUFFMAN_EMIT_SYMBOL, 143), + (41, HUFFMAN_EMIT_SYMBOL, 143), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 143), + + # Node 153 + (157, 0, 0), + (158, 0, 0), + (160, 0, 0), + (161, 0, 0), + (164, 0, 0), + (165, 0, 0), + (167, 0, 0), + (168, 0, 0), + (172, 0, 0), + (173, 0, 0), + (175, 0, 0), + (177, 0, 0), + (182, 0, 0), + (185, 0, 0), + (191, 0, 0), + (207, 0, 0), + + # Node 154 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 147), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 149), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 150), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 151), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 152), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 155), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 157), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 158), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 165), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 166), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 168), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 174), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 175), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 180), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 182), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 183), + + # Node 155 + (1, HUFFMAN_EMIT_SYMBOL, 147), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 147), + (1, HUFFMAN_EMIT_SYMBOL, 149), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 149), + (1, HUFFMAN_EMIT_SYMBOL, 150), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 150), + (1, HUFFMAN_EMIT_SYMBOL, 151), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 151), + (1, HUFFMAN_EMIT_SYMBOL, 152), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 152), + (1, HUFFMAN_EMIT_SYMBOL, 155), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 155), + (1, HUFFMAN_EMIT_SYMBOL, 157), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 157), + (1, HUFFMAN_EMIT_SYMBOL, 158), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 158), + + # Node 156 + (2, HUFFMAN_EMIT_SYMBOL, 147), + (9, HUFFMAN_EMIT_SYMBOL, 147), + (23, HUFFMAN_EMIT_SYMBOL, 147), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 147), + (2, HUFFMAN_EMIT_SYMBOL, 149), + (9, HUFFMAN_EMIT_SYMBOL, 149), + (23, HUFFMAN_EMIT_SYMBOL, 149), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 149), + (2, HUFFMAN_EMIT_SYMBOL, 150), + (9, HUFFMAN_EMIT_SYMBOL, 150), + (23, HUFFMAN_EMIT_SYMBOL, 150), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 150), + (2, HUFFMAN_EMIT_SYMBOL, 151), + (9, HUFFMAN_EMIT_SYMBOL, 151), + (23, HUFFMAN_EMIT_SYMBOL, 151), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 151), + + # Node 157 + (3, HUFFMAN_EMIT_SYMBOL, 147), + (6, HUFFMAN_EMIT_SYMBOL, 147), + (10, HUFFMAN_EMIT_SYMBOL, 147), + (15, HUFFMAN_EMIT_SYMBOL, 147), + (24, HUFFMAN_EMIT_SYMBOL, 147), + (31, HUFFMAN_EMIT_SYMBOL, 147), + (41, HUFFMAN_EMIT_SYMBOL, 147), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 147), + (3, HUFFMAN_EMIT_SYMBOL, 149), + (6, HUFFMAN_EMIT_SYMBOL, 149), + (10, 
HUFFMAN_EMIT_SYMBOL, 149), + (15, HUFFMAN_EMIT_SYMBOL, 149), + (24, HUFFMAN_EMIT_SYMBOL, 149), + (31, HUFFMAN_EMIT_SYMBOL, 149), + (41, HUFFMAN_EMIT_SYMBOL, 149), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 149), + + # Node 158 + (3, HUFFMAN_EMIT_SYMBOL, 150), + (6, HUFFMAN_EMIT_SYMBOL, 150), + (10, HUFFMAN_EMIT_SYMBOL, 150), + (15, HUFFMAN_EMIT_SYMBOL, 150), + (24, HUFFMAN_EMIT_SYMBOL, 150), + (31, HUFFMAN_EMIT_SYMBOL, 150), + (41, HUFFMAN_EMIT_SYMBOL, 150), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 150), + (3, HUFFMAN_EMIT_SYMBOL, 151), + (6, HUFFMAN_EMIT_SYMBOL, 151), + (10, HUFFMAN_EMIT_SYMBOL, 151), + (15, HUFFMAN_EMIT_SYMBOL, 151), + (24, HUFFMAN_EMIT_SYMBOL, 151), + (31, HUFFMAN_EMIT_SYMBOL, 151), + (41, HUFFMAN_EMIT_SYMBOL, 151), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 151), + + # Node 159 + (2, HUFFMAN_EMIT_SYMBOL, 152), + (9, HUFFMAN_EMIT_SYMBOL, 152), + (23, HUFFMAN_EMIT_SYMBOL, 152), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 152), + (2, HUFFMAN_EMIT_SYMBOL, 155), + (9, HUFFMAN_EMIT_SYMBOL, 155), + (23, HUFFMAN_EMIT_SYMBOL, 155), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 155), + (2, HUFFMAN_EMIT_SYMBOL, 157), + (9, HUFFMAN_EMIT_SYMBOL, 157), + (23, HUFFMAN_EMIT_SYMBOL, 157), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 157), + (2, HUFFMAN_EMIT_SYMBOL, 158), + (9, HUFFMAN_EMIT_SYMBOL, 158), + (23, HUFFMAN_EMIT_SYMBOL, 158), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 158), + + # Node 160 + (3, HUFFMAN_EMIT_SYMBOL, 152), + (6, HUFFMAN_EMIT_SYMBOL, 152), + (10, HUFFMAN_EMIT_SYMBOL, 152), + (15, HUFFMAN_EMIT_SYMBOL, 152), + (24, HUFFMAN_EMIT_SYMBOL, 152), + (31, HUFFMAN_EMIT_SYMBOL, 152), + (41, HUFFMAN_EMIT_SYMBOL, 152), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 152), + (3, HUFFMAN_EMIT_SYMBOL, 155), + (6, HUFFMAN_EMIT_SYMBOL, 155), + (10, HUFFMAN_EMIT_SYMBOL, 155), + (15, HUFFMAN_EMIT_SYMBOL, 155), + (24, HUFFMAN_EMIT_SYMBOL, 155), + (31, HUFFMAN_EMIT_SYMBOL, 155), + (41, HUFFMAN_EMIT_SYMBOL, 155), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 155), + + # Node 161 + (3, HUFFMAN_EMIT_SYMBOL, 157), + (6, HUFFMAN_EMIT_SYMBOL, 157), + (10, HUFFMAN_EMIT_SYMBOL, 157), + (15, HUFFMAN_EMIT_SYMBOL, 157), + (24, HUFFMAN_EMIT_SYMBOL, 157), + (31, HUFFMAN_EMIT_SYMBOL, 157), + (41, HUFFMAN_EMIT_SYMBOL, 157), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 157), + (3, HUFFMAN_EMIT_SYMBOL, 158), + (6, HUFFMAN_EMIT_SYMBOL, 158), + (10, HUFFMAN_EMIT_SYMBOL, 158), + (15, HUFFMAN_EMIT_SYMBOL, 158), + (24, HUFFMAN_EMIT_SYMBOL, 158), + (31, HUFFMAN_EMIT_SYMBOL, 158), + (41, HUFFMAN_EMIT_SYMBOL, 158), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 158), + + # Node 162 + (1, HUFFMAN_EMIT_SYMBOL, 165), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 165), + (1, HUFFMAN_EMIT_SYMBOL, 166), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 166), + (1, HUFFMAN_EMIT_SYMBOL, 168), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 168), + (1, HUFFMAN_EMIT_SYMBOL, 174), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 174), + (1, HUFFMAN_EMIT_SYMBOL, 175), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 175), + (1, HUFFMAN_EMIT_SYMBOL, 180), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 180), + (1, HUFFMAN_EMIT_SYMBOL, 182), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 182), + (1, HUFFMAN_EMIT_SYMBOL, 183), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 183), + + # Node 163 + (2, HUFFMAN_EMIT_SYMBOL, 165), + (9, HUFFMAN_EMIT_SYMBOL, 165), + (23, HUFFMAN_EMIT_SYMBOL, 165), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 165), + (2, HUFFMAN_EMIT_SYMBOL, 166), + (9, HUFFMAN_EMIT_SYMBOL, 
166), + (23, HUFFMAN_EMIT_SYMBOL, 166), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 166), + (2, HUFFMAN_EMIT_SYMBOL, 168), + (9, HUFFMAN_EMIT_SYMBOL, 168), + (23, HUFFMAN_EMIT_SYMBOL, 168), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 168), + (2, HUFFMAN_EMIT_SYMBOL, 174), + (9, HUFFMAN_EMIT_SYMBOL, 174), + (23, HUFFMAN_EMIT_SYMBOL, 174), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 174), + + # Node 164 + (3, HUFFMAN_EMIT_SYMBOL, 165), + (6, HUFFMAN_EMIT_SYMBOL, 165), + (10, HUFFMAN_EMIT_SYMBOL, 165), + (15, HUFFMAN_EMIT_SYMBOL, 165), + (24, HUFFMAN_EMIT_SYMBOL, 165), + (31, HUFFMAN_EMIT_SYMBOL, 165), + (41, HUFFMAN_EMIT_SYMBOL, 165), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 165), + (3, HUFFMAN_EMIT_SYMBOL, 166), + (6, HUFFMAN_EMIT_SYMBOL, 166), + (10, HUFFMAN_EMIT_SYMBOL, 166), + (15, HUFFMAN_EMIT_SYMBOL, 166), + (24, HUFFMAN_EMIT_SYMBOL, 166), + (31, HUFFMAN_EMIT_SYMBOL, 166), + (41, HUFFMAN_EMIT_SYMBOL, 166), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 166), + + # Node 165 + (3, HUFFMAN_EMIT_SYMBOL, 168), + (6, HUFFMAN_EMIT_SYMBOL, 168), + (10, HUFFMAN_EMIT_SYMBOL, 168), + (15, HUFFMAN_EMIT_SYMBOL, 168), + (24, HUFFMAN_EMIT_SYMBOL, 168), + (31, HUFFMAN_EMIT_SYMBOL, 168), + (41, HUFFMAN_EMIT_SYMBOL, 168), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 168), + (3, HUFFMAN_EMIT_SYMBOL, 174), + (6, HUFFMAN_EMIT_SYMBOL, 174), + (10, HUFFMAN_EMIT_SYMBOL, 174), + (15, HUFFMAN_EMIT_SYMBOL, 174), + (24, HUFFMAN_EMIT_SYMBOL, 174), + (31, HUFFMAN_EMIT_SYMBOL, 174), + (41, HUFFMAN_EMIT_SYMBOL, 174), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 174), + + # Node 166 + (2, HUFFMAN_EMIT_SYMBOL, 175), + (9, HUFFMAN_EMIT_SYMBOL, 175), + (23, HUFFMAN_EMIT_SYMBOL, 175), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 175), + (2, HUFFMAN_EMIT_SYMBOL, 180), + (9, HUFFMAN_EMIT_SYMBOL, 180), + (23, HUFFMAN_EMIT_SYMBOL, 180), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 180), + (2, HUFFMAN_EMIT_SYMBOL, 182), + (9, HUFFMAN_EMIT_SYMBOL, 182), + (23, HUFFMAN_EMIT_SYMBOL, 182), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 182), + (2, HUFFMAN_EMIT_SYMBOL, 183), + (9, HUFFMAN_EMIT_SYMBOL, 183), + (23, HUFFMAN_EMIT_SYMBOL, 183), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 183), + + # Node 167 + (3, HUFFMAN_EMIT_SYMBOL, 175), + (6, HUFFMAN_EMIT_SYMBOL, 175), + (10, HUFFMAN_EMIT_SYMBOL, 175), + (15, HUFFMAN_EMIT_SYMBOL, 175), + (24, HUFFMAN_EMIT_SYMBOL, 175), + (31, HUFFMAN_EMIT_SYMBOL, 175), + (41, HUFFMAN_EMIT_SYMBOL, 175), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 175), + (3, HUFFMAN_EMIT_SYMBOL, 180), + (6, HUFFMAN_EMIT_SYMBOL, 180), + (10, HUFFMAN_EMIT_SYMBOL, 180), + (15, HUFFMAN_EMIT_SYMBOL, 180), + (24, HUFFMAN_EMIT_SYMBOL, 180), + (31, HUFFMAN_EMIT_SYMBOL, 180), + (41, HUFFMAN_EMIT_SYMBOL, 180), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 180), + + # Node 168 + (3, HUFFMAN_EMIT_SYMBOL, 182), + (6, HUFFMAN_EMIT_SYMBOL, 182), + (10, HUFFMAN_EMIT_SYMBOL, 182), + (15, HUFFMAN_EMIT_SYMBOL, 182), + (24, HUFFMAN_EMIT_SYMBOL, 182), + (31, HUFFMAN_EMIT_SYMBOL, 182), + (41, HUFFMAN_EMIT_SYMBOL, 182), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 182), + (3, HUFFMAN_EMIT_SYMBOL, 183), + (6, HUFFMAN_EMIT_SYMBOL, 183), + (10, HUFFMAN_EMIT_SYMBOL, 183), + (15, HUFFMAN_EMIT_SYMBOL, 183), + (24, HUFFMAN_EMIT_SYMBOL, 183), + (31, HUFFMAN_EMIT_SYMBOL, 183), + (41, HUFFMAN_EMIT_SYMBOL, 183), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 183), + + # Node 169 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 188), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 191), + (0, HUFFMAN_COMPLETE | 
HUFFMAN_EMIT_SYMBOL, 197), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 231), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 239), + (176, 0, 0), + (178, 0, 0), + (179, 0, 0), + (183, 0, 0), + (184, 0, 0), + (186, 0, 0), + (187, 0, 0), + (192, 0, 0), + (199, 0, 0), + (208, 0, 0), + (223, 0, 0), + + # Node 170 + (1, HUFFMAN_EMIT_SYMBOL, 188), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 188), + (1, HUFFMAN_EMIT_SYMBOL, 191), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 191), + (1, HUFFMAN_EMIT_SYMBOL, 197), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 197), + (1, HUFFMAN_EMIT_SYMBOL, 231), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 231), + (1, HUFFMAN_EMIT_SYMBOL, 239), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 239), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 9), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 142), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 144), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 145), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 148), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 159), + + # Node 171 + (2, HUFFMAN_EMIT_SYMBOL, 188), + (9, HUFFMAN_EMIT_SYMBOL, 188), + (23, HUFFMAN_EMIT_SYMBOL, 188), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 188), + (2, HUFFMAN_EMIT_SYMBOL, 191), + (9, HUFFMAN_EMIT_SYMBOL, 191), + (23, HUFFMAN_EMIT_SYMBOL, 191), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 191), + (2, HUFFMAN_EMIT_SYMBOL, 197), + (9, HUFFMAN_EMIT_SYMBOL, 197), + (23, HUFFMAN_EMIT_SYMBOL, 197), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 197), + (2, HUFFMAN_EMIT_SYMBOL, 231), + (9, HUFFMAN_EMIT_SYMBOL, 231), + (23, HUFFMAN_EMIT_SYMBOL, 231), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 231), + + # Node 172 + (3, HUFFMAN_EMIT_SYMBOL, 188), + (6, HUFFMAN_EMIT_SYMBOL, 188), + (10, HUFFMAN_EMIT_SYMBOL, 188), + (15, HUFFMAN_EMIT_SYMBOL, 188), + (24, HUFFMAN_EMIT_SYMBOL, 188), + (31, HUFFMAN_EMIT_SYMBOL, 188), + (41, HUFFMAN_EMIT_SYMBOL, 188), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 188), + (3, HUFFMAN_EMIT_SYMBOL, 191), + (6, HUFFMAN_EMIT_SYMBOL, 191), + (10, HUFFMAN_EMIT_SYMBOL, 191), + (15, HUFFMAN_EMIT_SYMBOL, 191), + (24, HUFFMAN_EMIT_SYMBOL, 191), + (31, HUFFMAN_EMIT_SYMBOL, 191), + (41, HUFFMAN_EMIT_SYMBOL, 191), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 191), + + # Node 173 + (3, HUFFMAN_EMIT_SYMBOL, 197), + (6, HUFFMAN_EMIT_SYMBOL, 197), + (10, HUFFMAN_EMIT_SYMBOL, 197), + (15, HUFFMAN_EMIT_SYMBOL, 197), + (24, HUFFMAN_EMIT_SYMBOL, 197), + (31, HUFFMAN_EMIT_SYMBOL, 197), + (41, HUFFMAN_EMIT_SYMBOL, 197), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 197), + (3, HUFFMAN_EMIT_SYMBOL, 231), + (6, HUFFMAN_EMIT_SYMBOL, 231), + (10, HUFFMAN_EMIT_SYMBOL, 231), + (15, HUFFMAN_EMIT_SYMBOL, 231), + (24, HUFFMAN_EMIT_SYMBOL, 231), + (31, HUFFMAN_EMIT_SYMBOL, 231), + (41, HUFFMAN_EMIT_SYMBOL, 231), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 231), + + # Node 174 + (2, HUFFMAN_EMIT_SYMBOL, 239), + (9, HUFFMAN_EMIT_SYMBOL, 239), + (23, HUFFMAN_EMIT_SYMBOL, 239), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 239), + (1, HUFFMAN_EMIT_SYMBOL, 9), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 9), + (1, HUFFMAN_EMIT_SYMBOL, 142), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 142), + (1, HUFFMAN_EMIT_SYMBOL, 144), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 144), + (1, HUFFMAN_EMIT_SYMBOL, 145), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 145), + (1, HUFFMAN_EMIT_SYMBOL, 148), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 148), + (1, HUFFMAN_EMIT_SYMBOL, 159), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 159), + + # Node 
175 + (3, HUFFMAN_EMIT_SYMBOL, 239), + (6, HUFFMAN_EMIT_SYMBOL, 239), + (10, HUFFMAN_EMIT_SYMBOL, 239), + (15, HUFFMAN_EMIT_SYMBOL, 239), + (24, HUFFMAN_EMIT_SYMBOL, 239), + (31, HUFFMAN_EMIT_SYMBOL, 239), + (41, HUFFMAN_EMIT_SYMBOL, 239), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 239), + (2, HUFFMAN_EMIT_SYMBOL, 9), + (9, HUFFMAN_EMIT_SYMBOL, 9), + (23, HUFFMAN_EMIT_SYMBOL, 9), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 9), + (2, HUFFMAN_EMIT_SYMBOL, 142), + (9, HUFFMAN_EMIT_SYMBOL, 142), + (23, HUFFMAN_EMIT_SYMBOL, 142), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 142), + + # Node 176 + (3, HUFFMAN_EMIT_SYMBOL, 9), + (6, HUFFMAN_EMIT_SYMBOL, 9), + (10, HUFFMAN_EMIT_SYMBOL, 9), + (15, HUFFMAN_EMIT_SYMBOL, 9), + (24, HUFFMAN_EMIT_SYMBOL, 9), + (31, HUFFMAN_EMIT_SYMBOL, 9), + (41, HUFFMAN_EMIT_SYMBOL, 9), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 9), + (3, HUFFMAN_EMIT_SYMBOL, 142), + (6, HUFFMAN_EMIT_SYMBOL, 142), + (10, HUFFMAN_EMIT_SYMBOL, 142), + (15, HUFFMAN_EMIT_SYMBOL, 142), + (24, HUFFMAN_EMIT_SYMBOL, 142), + (31, HUFFMAN_EMIT_SYMBOL, 142), + (41, HUFFMAN_EMIT_SYMBOL, 142), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 142), + + # Node 177 + (2, HUFFMAN_EMIT_SYMBOL, 144), + (9, HUFFMAN_EMIT_SYMBOL, 144), + (23, HUFFMAN_EMIT_SYMBOL, 144), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 144), + (2, HUFFMAN_EMIT_SYMBOL, 145), + (9, HUFFMAN_EMIT_SYMBOL, 145), + (23, HUFFMAN_EMIT_SYMBOL, 145), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 145), + (2, HUFFMAN_EMIT_SYMBOL, 148), + (9, HUFFMAN_EMIT_SYMBOL, 148), + (23, HUFFMAN_EMIT_SYMBOL, 148), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 148), + (2, HUFFMAN_EMIT_SYMBOL, 159), + (9, HUFFMAN_EMIT_SYMBOL, 159), + (23, HUFFMAN_EMIT_SYMBOL, 159), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 159), + + # Node 178 + (3, HUFFMAN_EMIT_SYMBOL, 144), + (6, HUFFMAN_EMIT_SYMBOL, 144), + (10, HUFFMAN_EMIT_SYMBOL, 144), + (15, HUFFMAN_EMIT_SYMBOL, 144), + (24, HUFFMAN_EMIT_SYMBOL, 144), + (31, HUFFMAN_EMIT_SYMBOL, 144), + (41, HUFFMAN_EMIT_SYMBOL, 144), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 144), + (3, HUFFMAN_EMIT_SYMBOL, 145), + (6, HUFFMAN_EMIT_SYMBOL, 145), + (10, HUFFMAN_EMIT_SYMBOL, 145), + (15, HUFFMAN_EMIT_SYMBOL, 145), + (24, HUFFMAN_EMIT_SYMBOL, 145), + (31, HUFFMAN_EMIT_SYMBOL, 145), + (41, HUFFMAN_EMIT_SYMBOL, 145), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 145), + + # Node 179 + (3, HUFFMAN_EMIT_SYMBOL, 148), + (6, HUFFMAN_EMIT_SYMBOL, 148), + (10, HUFFMAN_EMIT_SYMBOL, 148), + (15, HUFFMAN_EMIT_SYMBOL, 148), + (24, HUFFMAN_EMIT_SYMBOL, 148), + (31, HUFFMAN_EMIT_SYMBOL, 148), + (41, HUFFMAN_EMIT_SYMBOL, 148), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 148), + (3, HUFFMAN_EMIT_SYMBOL, 159), + (6, HUFFMAN_EMIT_SYMBOL, 159), + (10, HUFFMAN_EMIT_SYMBOL, 159), + (15, HUFFMAN_EMIT_SYMBOL, 159), + (24, HUFFMAN_EMIT_SYMBOL, 159), + (31, HUFFMAN_EMIT_SYMBOL, 159), + (41, HUFFMAN_EMIT_SYMBOL, 159), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 159), + + # Node 180 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 171), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 206), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 215), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 225), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 236), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 237), + (188, 0, 0), + (189, 0, 0), + (193, 0, 0), + (196, 0, 0), + (200, 0, 0), + (203, 0, 0), + (209, 0, 0), + (216, 0, 0), + (224, 0, 0), + (238, 0, 0), + + # Node 181 + (1, HUFFMAN_EMIT_SYMBOL, 171), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 
171), + (1, HUFFMAN_EMIT_SYMBOL, 206), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 206), + (1, HUFFMAN_EMIT_SYMBOL, 215), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 215), + (1, HUFFMAN_EMIT_SYMBOL, 225), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 225), + (1, HUFFMAN_EMIT_SYMBOL, 236), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 236), + (1, HUFFMAN_EMIT_SYMBOL, 237), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 237), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 199), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 207), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 234), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 235), + + # Node 182 + (2, HUFFMAN_EMIT_SYMBOL, 171), + (9, HUFFMAN_EMIT_SYMBOL, 171), + (23, HUFFMAN_EMIT_SYMBOL, 171), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 171), + (2, HUFFMAN_EMIT_SYMBOL, 206), + (9, HUFFMAN_EMIT_SYMBOL, 206), + (23, HUFFMAN_EMIT_SYMBOL, 206), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 206), + (2, HUFFMAN_EMIT_SYMBOL, 215), + (9, HUFFMAN_EMIT_SYMBOL, 215), + (23, HUFFMAN_EMIT_SYMBOL, 215), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 215), + (2, HUFFMAN_EMIT_SYMBOL, 225), + (9, HUFFMAN_EMIT_SYMBOL, 225), + (23, HUFFMAN_EMIT_SYMBOL, 225), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 225), + + # Node 183 + (3, HUFFMAN_EMIT_SYMBOL, 171), + (6, HUFFMAN_EMIT_SYMBOL, 171), + (10, HUFFMAN_EMIT_SYMBOL, 171), + (15, HUFFMAN_EMIT_SYMBOL, 171), + (24, HUFFMAN_EMIT_SYMBOL, 171), + (31, HUFFMAN_EMIT_SYMBOL, 171), + (41, HUFFMAN_EMIT_SYMBOL, 171), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 171), + (3, HUFFMAN_EMIT_SYMBOL, 206), + (6, HUFFMAN_EMIT_SYMBOL, 206), + (10, HUFFMAN_EMIT_SYMBOL, 206), + (15, HUFFMAN_EMIT_SYMBOL, 206), + (24, HUFFMAN_EMIT_SYMBOL, 206), + (31, HUFFMAN_EMIT_SYMBOL, 206), + (41, HUFFMAN_EMIT_SYMBOL, 206), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 206), + + # Node 184 + (3, HUFFMAN_EMIT_SYMBOL, 215), + (6, HUFFMAN_EMIT_SYMBOL, 215), + (10, HUFFMAN_EMIT_SYMBOL, 215), + (15, HUFFMAN_EMIT_SYMBOL, 215), + (24, HUFFMAN_EMIT_SYMBOL, 215), + (31, HUFFMAN_EMIT_SYMBOL, 215), + (41, HUFFMAN_EMIT_SYMBOL, 215), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 215), + (3, HUFFMAN_EMIT_SYMBOL, 225), + (6, HUFFMAN_EMIT_SYMBOL, 225), + (10, HUFFMAN_EMIT_SYMBOL, 225), + (15, HUFFMAN_EMIT_SYMBOL, 225), + (24, HUFFMAN_EMIT_SYMBOL, 225), + (31, HUFFMAN_EMIT_SYMBOL, 225), + (41, HUFFMAN_EMIT_SYMBOL, 225), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 225), + + # Node 185 + (2, HUFFMAN_EMIT_SYMBOL, 236), + (9, HUFFMAN_EMIT_SYMBOL, 236), + (23, HUFFMAN_EMIT_SYMBOL, 236), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 236), + (2, HUFFMAN_EMIT_SYMBOL, 237), + (9, HUFFMAN_EMIT_SYMBOL, 237), + (23, HUFFMAN_EMIT_SYMBOL, 237), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 237), + (1, HUFFMAN_EMIT_SYMBOL, 199), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 199), + (1, HUFFMAN_EMIT_SYMBOL, 207), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 207), + (1, HUFFMAN_EMIT_SYMBOL, 234), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 234), + (1, HUFFMAN_EMIT_SYMBOL, 235), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 235), + + # Node 186 + (3, HUFFMAN_EMIT_SYMBOL, 236), + (6, HUFFMAN_EMIT_SYMBOL, 236), + (10, HUFFMAN_EMIT_SYMBOL, 236), + (15, HUFFMAN_EMIT_SYMBOL, 236), + (24, HUFFMAN_EMIT_SYMBOL, 236), + (31, HUFFMAN_EMIT_SYMBOL, 236), + (41, HUFFMAN_EMIT_SYMBOL, 236), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 236), + (3, HUFFMAN_EMIT_SYMBOL, 237), + (6, HUFFMAN_EMIT_SYMBOL, 237), + (10, HUFFMAN_EMIT_SYMBOL, 237), + (15, HUFFMAN_EMIT_SYMBOL, 
237), + (24, HUFFMAN_EMIT_SYMBOL, 237), + (31, HUFFMAN_EMIT_SYMBOL, 237), + (41, HUFFMAN_EMIT_SYMBOL, 237), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 237), + + # Node 187 + (2, HUFFMAN_EMIT_SYMBOL, 199), + (9, HUFFMAN_EMIT_SYMBOL, 199), + (23, HUFFMAN_EMIT_SYMBOL, 199), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 199), + (2, HUFFMAN_EMIT_SYMBOL, 207), + (9, HUFFMAN_EMIT_SYMBOL, 207), + (23, HUFFMAN_EMIT_SYMBOL, 207), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 207), + (2, HUFFMAN_EMIT_SYMBOL, 234), + (9, HUFFMAN_EMIT_SYMBOL, 234), + (23, HUFFMAN_EMIT_SYMBOL, 234), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 234), + (2, HUFFMAN_EMIT_SYMBOL, 235), + (9, HUFFMAN_EMIT_SYMBOL, 235), + (23, HUFFMAN_EMIT_SYMBOL, 235), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 235), + + # Node 188 + (3, HUFFMAN_EMIT_SYMBOL, 199), + (6, HUFFMAN_EMIT_SYMBOL, 199), + (10, HUFFMAN_EMIT_SYMBOL, 199), + (15, HUFFMAN_EMIT_SYMBOL, 199), + (24, HUFFMAN_EMIT_SYMBOL, 199), + (31, HUFFMAN_EMIT_SYMBOL, 199), + (41, HUFFMAN_EMIT_SYMBOL, 199), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 199), + (3, HUFFMAN_EMIT_SYMBOL, 207), + (6, HUFFMAN_EMIT_SYMBOL, 207), + (10, HUFFMAN_EMIT_SYMBOL, 207), + (15, HUFFMAN_EMIT_SYMBOL, 207), + (24, HUFFMAN_EMIT_SYMBOL, 207), + (31, HUFFMAN_EMIT_SYMBOL, 207), + (41, HUFFMAN_EMIT_SYMBOL, 207), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 207), + + # Node 189 + (3, HUFFMAN_EMIT_SYMBOL, 234), + (6, HUFFMAN_EMIT_SYMBOL, 234), + (10, HUFFMAN_EMIT_SYMBOL, 234), + (15, HUFFMAN_EMIT_SYMBOL, 234), + (24, HUFFMAN_EMIT_SYMBOL, 234), + (31, HUFFMAN_EMIT_SYMBOL, 234), + (41, HUFFMAN_EMIT_SYMBOL, 234), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 234), + (3, HUFFMAN_EMIT_SYMBOL, 235), + (6, HUFFMAN_EMIT_SYMBOL, 235), + (10, HUFFMAN_EMIT_SYMBOL, 235), + (15, HUFFMAN_EMIT_SYMBOL, 235), + (24, HUFFMAN_EMIT_SYMBOL, 235), + (31, HUFFMAN_EMIT_SYMBOL, 235), + (41, HUFFMAN_EMIT_SYMBOL, 235), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 235), + + # Node 190 + (194, 0, 0), + (195, 0, 0), + (197, 0, 0), + (198, 0, 0), + (201, 0, 0), + (202, 0, 0), + (204, 0, 0), + (205, 0, 0), + (210, 0, 0), + (213, 0, 0), + (217, 0, 0), + (220, 0, 0), + (225, 0, 0), + (231, 0, 0), + (239, 0, 0), + (246, 0, 0), + + # Node 191 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 192), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 193), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 200), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 201), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 202), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 205), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 210), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 213), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 218), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 219), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 238), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 240), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 242), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 243), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 255), + (206, 0, 0), + + # Node 192 + (1, HUFFMAN_EMIT_SYMBOL, 192), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 192), + (1, HUFFMAN_EMIT_SYMBOL, 193), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 193), + (1, HUFFMAN_EMIT_SYMBOL, 200), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 200), + (1, HUFFMAN_EMIT_SYMBOL, 201), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 201), + (1, HUFFMAN_EMIT_SYMBOL, 202), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 202), + (1, HUFFMAN_EMIT_SYMBOL, 205), + (22, HUFFMAN_COMPLETE | 
HUFFMAN_EMIT_SYMBOL, 205), + (1, HUFFMAN_EMIT_SYMBOL, 210), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 210), + (1, HUFFMAN_EMIT_SYMBOL, 213), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 213), + + # Node 193 + (2, HUFFMAN_EMIT_SYMBOL, 192), + (9, HUFFMAN_EMIT_SYMBOL, 192), + (23, HUFFMAN_EMIT_SYMBOL, 192), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 192), + (2, HUFFMAN_EMIT_SYMBOL, 193), + (9, HUFFMAN_EMIT_SYMBOL, 193), + (23, HUFFMAN_EMIT_SYMBOL, 193), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 193), + (2, HUFFMAN_EMIT_SYMBOL, 200), + (9, HUFFMAN_EMIT_SYMBOL, 200), + (23, HUFFMAN_EMIT_SYMBOL, 200), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 200), + (2, HUFFMAN_EMIT_SYMBOL, 201), + (9, HUFFMAN_EMIT_SYMBOL, 201), + (23, HUFFMAN_EMIT_SYMBOL, 201), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 201), + + # Node 194 + (3, HUFFMAN_EMIT_SYMBOL, 192), + (6, HUFFMAN_EMIT_SYMBOL, 192), + (10, HUFFMAN_EMIT_SYMBOL, 192), + (15, HUFFMAN_EMIT_SYMBOL, 192), + (24, HUFFMAN_EMIT_SYMBOL, 192), + (31, HUFFMAN_EMIT_SYMBOL, 192), + (41, HUFFMAN_EMIT_SYMBOL, 192), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 192), + (3, HUFFMAN_EMIT_SYMBOL, 193), + (6, HUFFMAN_EMIT_SYMBOL, 193), + (10, HUFFMAN_EMIT_SYMBOL, 193), + (15, HUFFMAN_EMIT_SYMBOL, 193), + (24, HUFFMAN_EMIT_SYMBOL, 193), + (31, HUFFMAN_EMIT_SYMBOL, 193), + (41, HUFFMAN_EMIT_SYMBOL, 193), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 193), + + # Node 195 + (3, HUFFMAN_EMIT_SYMBOL, 200), + (6, HUFFMAN_EMIT_SYMBOL, 200), + (10, HUFFMAN_EMIT_SYMBOL, 200), + (15, HUFFMAN_EMIT_SYMBOL, 200), + (24, HUFFMAN_EMIT_SYMBOL, 200), + (31, HUFFMAN_EMIT_SYMBOL, 200), + (41, HUFFMAN_EMIT_SYMBOL, 200), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 200), + (3, HUFFMAN_EMIT_SYMBOL, 201), + (6, HUFFMAN_EMIT_SYMBOL, 201), + (10, HUFFMAN_EMIT_SYMBOL, 201), + (15, HUFFMAN_EMIT_SYMBOL, 201), + (24, HUFFMAN_EMIT_SYMBOL, 201), + (31, HUFFMAN_EMIT_SYMBOL, 201), + (41, HUFFMAN_EMIT_SYMBOL, 201), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 201), + + # Node 196 + (2, HUFFMAN_EMIT_SYMBOL, 202), + (9, HUFFMAN_EMIT_SYMBOL, 202), + (23, HUFFMAN_EMIT_SYMBOL, 202), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 202), + (2, HUFFMAN_EMIT_SYMBOL, 205), + (9, HUFFMAN_EMIT_SYMBOL, 205), + (23, HUFFMAN_EMIT_SYMBOL, 205), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 205), + (2, HUFFMAN_EMIT_SYMBOL, 210), + (9, HUFFMAN_EMIT_SYMBOL, 210), + (23, HUFFMAN_EMIT_SYMBOL, 210), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 210), + (2, HUFFMAN_EMIT_SYMBOL, 213), + (9, HUFFMAN_EMIT_SYMBOL, 213), + (23, HUFFMAN_EMIT_SYMBOL, 213), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 213), + + # Node 197 + (3, HUFFMAN_EMIT_SYMBOL, 202), + (6, HUFFMAN_EMIT_SYMBOL, 202), + (10, HUFFMAN_EMIT_SYMBOL, 202), + (15, HUFFMAN_EMIT_SYMBOL, 202), + (24, HUFFMAN_EMIT_SYMBOL, 202), + (31, HUFFMAN_EMIT_SYMBOL, 202), + (41, HUFFMAN_EMIT_SYMBOL, 202), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 202), + (3, HUFFMAN_EMIT_SYMBOL, 205), + (6, HUFFMAN_EMIT_SYMBOL, 205), + (10, HUFFMAN_EMIT_SYMBOL, 205), + (15, HUFFMAN_EMIT_SYMBOL, 205), + (24, HUFFMAN_EMIT_SYMBOL, 205), + (31, HUFFMAN_EMIT_SYMBOL, 205), + (41, HUFFMAN_EMIT_SYMBOL, 205), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 205), + + # Node 198 + (3, HUFFMAN_EMIT_SYMBOL, 210), + (6, HUFFMAN_EMIT_SYMBOL, 210), + (10, HUFFMAN_EMIT_SYMBOL, 210), + (15, HUFFMAN_EMIT_SYMBOL, 210), + (24, HUFFMAN_EMIT_SYMBOL, 210), + (31, HUFFMAN_EMIT_SYMBOL, 210), + (41, HUFFMAN_EMIT_SYMBOL, 210), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 210), + (3, 
HUFFMAN_EMIT_SYMBOL, 213), + (6, HUFFMAN_EMIT_SYMBOL, 213), + (10, HUFFMAN_EMIT_SYMBOL, 213), + (15, HUFFMAN_EMIT_SYMBOL, 213), + (24, HUFFMAN_EMIT_SYMBOL, 213), + (31, HUFFMAN_EMIT_SYMBOL, 213), + (41, HUFFMAN_EMIT_SYMBOL, 213), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 213), + + # Node 199 + (1, HUFFMAN_EMIT_SYMBOL, 218), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 218), + (1, HUFFMAN_EMIT_SYMBOL, 219), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 219), + (1, HUFFMAN_EMIT_SYMBOL, 238), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 238), + (1, HUFFMAN_EMIT_SYMBOL, 240), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 240), + (1, HUFFMAN_EMIT_SYMBOL, 242), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 242), + (1, HUFFMAN_EMIT_SYMBOL, 243), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 243), + (1, HUFFMAN_EMIT_SYMBOL, 255), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 255), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 203), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 204), + + # Node 200 + (2, HUFFMAN_EMIT_SYMBOL, 218), + (9, HUFFMAN_EMIT_SYMBOL, 218), + (23, HUFFMAN_EMIT_SYMBOL, 218), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 218), + (2, HUFFMAN_EMIT_SYMBOL, 219), + (9, HUFFMAN_EMIT_SYMBOL, 219), + (23, HUFFMAN_EMIT_SYMBOL, 219), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 219), + (2, HUFFMAN_EMIT_SYMBOL, 238), + (9, HUFFMAN_EMIT_SYMBOL, 238), + (23, HUFFMAN_EMIT_SYMBOL, 238), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 238), + (2, HUFFMAN_EMIT_SYMBOL, 240), + (9, HUFFMAN_EMIT_SYMBOL, 240), + (23, HUFFMAN_EMIT_SYMBOL, 240), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 240), + + # Node 201 + (3, HUFFMAN_EMIT_SYMBOL, 218), + (6, HUFFMAN_EMIT_SYMBOL, 218), + (10, HUFFMAN_EMIT_SYMBOL, 218), + (15, HUFFMAN_EMIT_SYMBOL, 218), + (24, HUFFMAN_EMIT_SYMBOL, 218), + (31, HUFFMAN_EMIT_SYMBOL, 218), + (41, HUFFMAN_EMIT_SYMBOL, 218), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 218), + (3, HUFFMAN_EMIT_SYMBOL, 219), + (6, HUFFMAN_EMIT_SYMBOL, 219), + (10, HUFFMAN_EMIT_SYMBOL, 219), + (15, HUFFMAN_EMIT_SYMBOL, 219), + (24, HUFFMAN_EMIT_SYMBOL, 219), + (31, HUFFMAN_EMIT_SYMBOL, 219), + (41, HUFFMAN_EMIT_SYMBOL, 219), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 219), + + # Node 202 + (3, HUFFMAN_EMIT_SYMBOL, 238), + (6, HUFFMAN_EMIT_SYMBOL, 238), + (10, HUFFMAN_EMIT_SYMBOL, 238), + (15, HUFFMAN_EMIT_SYMBOL, 238), + (24, HUFFMAN_EMIT_SYMBOL, 238), + (31, HUFFMAN_EMIT_SYMBOL, 238), + (41, HUFFMAN_EMIT_SYMBOL, 238), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 238), + (3, HUFFMAN_EMIT_SYMBOL, 240), + (6, HUFFMAN_EMIT_SYMBOL, 240), + (10, HUFFMAN_EMIT_SYMBOL, 240), + (15, HUFFMAN_EMIT_SYMBOL, 240), + (24, HUFFMAN_EMIT_SYMBOL, 240), + (31, HUFFMAN_EMIT_SYMBOL, 240), + (41, HUFFMAN_EMIT_SYMBOL, 240), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 240), + + # Node 203 + (2, HUFFMAN_EMIT_SYMBOL, 242), + (9, HUFFMAN_EMIT_SYMBOL, 242), + (23, HUFFMAN_EMIT_SYMBOL, 242), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 242), + (2, HUFFMAN_EMIT_SYMBOL, 243), + (9, HUFFMAN_EMIT_SYMBOL, 243), + (23, HUFFMAN_EMIT_SYMBOL, 243), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 243), + (2, HUFFMAN_EMIT_SYMBOL, 255), + (9, HUFFMAN_EMIT_SYMBOL, 255), + (23, HUFFMAN_EMIT_SYMBOL, 255), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 255), + (1, HUFFMAN_EMIT_SYMBOL, 203), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 203), + (1, HUFFMAN_EMIT_SYMBOL, 204), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 204), + + # Node 204 + (3, HUFFMAN_EMIT_SYMBOL, 242), + (6, HUFFMAN_EMIT_SYMBOL, 242), + (10, 
HUFFMAN_EMIT_SYMBOL, 242), + (15, HUFFMAN_EMIT_SYMBOL, 242), + (24, HUFFMAN_EMIT_SYMBOL, 242), + (31, HUFFMAN_EMIT_SYMBOL, 242), + (41, HUFFMAN_EMIT_SYMBOL, 242), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 242), + (3, HUFFMAN_EMIT_SYMBOL, 243), + (6, HUFFMAN_EMIT_SYMBOL, 243), + (10, HUFFMAN_EMIT_SYMBOL, 243), + (15, HUFFMAN_EMIT_SYMBOL, 243), + (24, HUFFMAN_EMIT_SYMBOL, 243), + (31, HUFFMAN_EMIT_SYMBOL, 243), + (41, HUFFMAN_EMIT_SYMBOL, 243), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 243), + + # Node 205 + (3, HUFFMAN_EMIT_SYMBOL, 255), + (6, HUFFMAN_EMIT_SYMBOL, 255), + (10, HUFFMAN_EMIT_SYMBOL, 255), + (15, HUFFMAN_EMIT_SYMBOL, 255), + (24, HUFFMAN_EMIT_SYMBOL, 255), + (31, HUFFMAN_EMIT_SYMBOL, 255), + (41, HUFFMAN_EMIT_SYMBOL, 255), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 255), + (2, HUFFMAN_EMIT_SYMBOL, 203), + (9, HUFFMAN_EMIT_SYMBOL, 203), + (23, HUFFMAN_EMIT_SYMBOL, 203), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 203), + (2, HUFFMAN_EMIT_SYMBOL, 204), + (9, HUFFMAN_EMIT_SYMBOL, 204), + (23, HUFFMAN_EMIT_SYMBOL, 204), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 204), + + # Node 206 + (3, HUFFMAN_EMIT_SYMBOL, 203), + (6, HUFFMAN_EMIT_SYMBOL, 203), + (10, HUFFMAN_EMIT_SYMBOL, 203), + (15, HUFFMAN_EMIT_SYMBOL, 203), + (24, HUFFMAN_EMIT_SYMBOL, 203), + (31, HUFFMAN_EMIT_SYMBOL, 203), + (41, HUFFMAN_EMIT_SYMBOL, 203), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 203), + (3, HUFFMAN_EMIT_SYMBOL, 204), + (6, HUFFMAN_EMIT_SYMBOL, 204), + (10, HUFFMAN_EMIT_SYMBOL, 204), + (15, HUFFMAN_EMIT_SYMBOL, 204), + (24, HUFFMAN_EMIT_SYMBOL, 204), + (31, HUFFMAN_EMIT_SYMBOL, 204), + (41, HUFFMAN_EMIT_SYMBOL, 204), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 204), + + # Node 207 + (211, 0, 0), + (212, 0, 0), + (214, 0, 0), + (215, 0, 0), + (218, 0, 0), + (219, 0, 0), + (221, 0, 0), + (222, 0, 0), + (226, 0, 0), + (228, 0, 0), + (232, 0, 0), + (235, 0, 0), + (240, 0, 0), + (243, 0, 0), + (247, 0, 0), + (250, 0, 0), + + # Node 208 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 211), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 212), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 214), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 221), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 222), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 223), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 241), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 244), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 245), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 246), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 247), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 248), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 250), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 251), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 252), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 253), + + # Node 209 + (1, HUFFMAN_EMIT_SYMBOL, 211), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 211), + (1, HUFFMAN_EMIT_SYMBOL, 212), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 212), + (1, HUFFMAN_EMIT_SYMBOL, 214), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 214), + (1, HUFFMAN_EMIT_SYMBOL, 221), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 221), + (1, HUFFMAN_EMIT_SYMBOL, 222), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 222), + (1, HUFFMAN_EMIT_SYMBOL, 223), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 223), + (1, HUFFMAN_EMIT_SYMBOL, 241), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 241), + (1, HUFFMAN_EMIT_SYMBOL, 244), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 244), + + # Node 210 + (2, HUFFMAN_EMIT_SYMBOL, 
211), + (9, HUFFMAN_EMIT_SYMBOL, 211), + (23, HUFFMAN_EMIT_SYMBOL, 211), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 211), + (2, HUFFMAN_EMIT_SYMBOL, 212), + (9, HUFFMAN_EMIT_SYMBOL, 212), + (23, HUFFMAN_EMIT_SYMBOL, 212), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 212), + (2, HUFFMAN_EMIT_SYMBOL, 214), + (9, HUFFMAN_EMIT_SYMBOL, 214), + (23, HUFFMAN_EMIT_SYMBOL, 214), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 214), + (2, HUFFMAN_EMIT_SYMBOL, 221), + (9, HUFFMAN_EMIT_SYMBOL, 221), + (23, HUFFMAN_EMIT_SYMBOL, 221), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 221), + + # Node 211 + (3, HUFFMAN_EMIT_SYMBOL, 211), + (6, HUFFMAN_EMIT_SYMBOL, 211), + (10, HUFFMAN_EMIT_SYMBOL, 211), + (15, HUFFMAN_EMIT_SYMBOL, 211), + (24, HUFFMAN_EMIT_SYMBOL, 211), + (31, HUFFMAN_EMIT_SYMBOL, 211), + (41, HUFFMAN_EMIT_SYMBOL, 211), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 211), + (3, HUFFMAN_EMIT_SYMBOL, 212), + (6, HUFFMAN_EMIT_SYMBOL, 212), + (10, HUFFMAN_EMIT_SYMBOL, 212), + (15, HUFFMAN_EMIT_SYMBOL, 212), + (24, HUFFMAN_EMIT_SYMBOL, 212), + (31, HUFFMAN_EMIT_SYMBOL, 212), + (41, HUFFMAN_EMIT_SYMBOL, 212), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 212), + + # Node 212 + (3, HUFFMAN_EMIT_SYMBOL, 214), + (6, HUFFMAN_EMIT_SYMBOL, 214), + (10, HUFFMAN_EMIT_SYMBOL, 214), + (15, HUFFMAN_EMIT_SYMBOL, 214), + (24, HUFFMAN_EMIT_SYMBOL, 214), + (31, HUFFMAN_EMIT_SYMBOL, 214), + (41, HUFFMAN_EMIT_SYMBOL, 214), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 214), + (3, HUFFMAN_EMIT_SYMBOL, 221), + (6, HUFFMAN_EMIT_SYMBOL, 221), + (10, HUFFMAN_EMIT_SYMBOL, 221), + (15, HUFFMAN_EMIT_SYMBOL, 221), + (24, HUFFMAN_EMIT_SYMBOL, 221), + (31, HUFFMAN_EMIT_SYMBOL, 221), + (41, HUFFMAN_EMIT_SYMBOL, 221), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 221), + + # Node 213 + (2, HUFFMAN_EMIT_SYMBOL, 222), + (9, HUFFMAN_EMIT_SYMBOL, 222), + (23, HUFFMAN_EMIT_SYMBOL, 222), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 222), + (2, HUFFMAN_EMIT_SYMBOL, 223), + (9, HUFFMAN_EMIT_SYMBOL, 223), + (23, HUFFMAN_EMIT_SYMBOL, 223), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 223), + (2, HUFFMAN_EMIT_SYMBOL, 241), + (9, HUFFMAN_EMIT_SYMBOL, 241), + (23, HUFFMAN_EMIT_SYMBOL, 241), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 241), + (2, HUFFMAN_EMIT_SYMBOL, 244), + (9, HUFFMAN_EMIT_SYMBOL, 244), + (23, HUFFMAN_EMIT_SYMBOL, 244), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 244), + + # Node 214 + (3, HUFFMAN_EMIT_SYMBOL, 222), + (6, HUFFMAN_EMIT_SYMBOL, 222), + (10, HUFFMAN_EMIT_SYMBOL, 222), + (15, HUFFMAN_EMIT_SYMBOL, 222), + (24, HUFFMAN_EMIT_SYMBOL, 222), + (31, HUFFMAN_EMIT_SYMBOL, 222), + (41, HUFFMAN_EMIT_SYMBOL, 222), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 222), + (3, HUFFMAN_EMIT_SYMBOL, 223), + (6, HUFFMAN_EMIT_SYMBOL, 223), + (10, HUFFMAN_EMIT_SYMBOL, 223), + (15, HUFFMAN_EMIT_SYMBOL, 223), + (24, HUFFMAN_EMIT_SYMBOL, 223), + (31, HUFFMAN_EMIT_SYMBOL, 223), + (41, HUFFMAN_EMIT_SYMBOL, 223), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 223), + + # Node 215 + (3, HUFFMAN_EMIT_SYMBOL, 241), + (6, HUFFMAN_EMIT_SYMBOL, 241), + (10, HUFFMAN_EMIT_SYMBOL, 241), + (15, HUFFMAN_EMIT_SYMBOL, 241), + (24, HUFFMAN_EMIT_SYMBOL, 241), + (31, HUFFMAN_EMIT_SYMBOL, 241), + (41, HUFFMAN_EMIT_SYMBOL, 241), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 241), + (3, HUFFMAN_EMIT_SYMBOL, 244), + (6, HUFFMAN_EMIT_SYMBOL, 244), + (10, HUFFMAN_EMIT_SYMBOL, 244), + (15, HUFFMAN_EMIT_SYMBOL, 244), + (24, HUFFMAN_EMIT_SYMBOL, 244), + (31, HUFFMAN_EMIT_SYMBOL, 244), + (41, HUFFMAN_EMIT_SYMBOL, 244), + (56, HUFFMAN_COMPLETE 
| HUFFMAN_EMIT_SYMBOL, 244), + + # Node 216 + (1, HUFFMAN_EMIT_SYMBOL, 245), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 245), + (1, HUFFMAN_EMIT_SYMBOL, 246), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 246), + (1, HUFFMAN_EMIT_SYMBOL, 247), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 247), + (1, HUFFMAN_EMIT_SYMBOL, 248), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 248), + (1, HUFFMAN_EMIT_SYMBOL, 250), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 250), + (1, HUFFMAN_EMIT_SYMBOL, 251), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 251), + (1, HUFFMAN_EMIT_SYMBOL, 252), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 252), + (1, HUFFMAN_EMIT_SYMBOL, 253), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 253), + + # Node 217 + (2, HUFFMAN_EMIT_SYMBOL, 245), + (9, HUFFMAN_EMIT_SYMBOL, 245), + (23, HUFFMAN_EMIT_SYMBOL, 245), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 245), + (2, HUFFMAN_EMIT_SYMBOL, 246), + (9, HUFFMAN_EMIT_SYMBOL, 246), + (23, HUFFMAN_EMIT_SYMBOL, 246), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 246), + (2, HUFFMAN_EMIT_SYMBOL, 247), + (9, HUFFMAN_EMIT_SYMBOL, 247), + (23, HUFFMAN_EMIT_SYMBOL, 247), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 247), + (2, HUFFMAN_EMIT_SYMBOL, 248), + (9, HUFFMAN_EMIT_SYMBOL, 248), + (23, HUFFMAN_EMIT_SYMBOL, 248), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 248), + + # Node 218 + (3, HUFFMAN_EMIT_SYMBOL, 245), + (6, HUFFMAN_EMIT_SYMBOL, 245), + (10, HUFFMAN_EMIT_SYMBOL, 245), + (15, HUFFMAN_EMIT_SYMBOL, 245), + (24, HUFFMAN_EMIT_SYMBOL, 245), + (31, HUFFMAN_EMIT_SYMBOL, 245), + (41, HUFFMAN_EMIT_SYMBOL, 245), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 245), + (3, HUFFMAN_EMIT_SYMBOL, 246), + (6, HUFFMAN_EMIT_SYMBOL, 246), + (10, HUFFMAN_EMIT_SYMBOL, 246), + (15, HUFFMAN_EMIT_SYMBOL, 246), + (24, HUFFMAN_EMIT_SYMBOL, 246), + (31, HUFFMAN_EMIT_SYMBOL, 246), + (41, HUFFMAN_EMIT_SYMBOL, 246), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 246), + + # Node 219 + (3, HUFFMAN_EMIT_SYMBOL, 247), + (6, HUFFMAN_EMIT_SYMBOL, 247), + (10, HUFFMAN_EMIT_SYMBOL, 247), + (15, HUFFMAN_EMIT_SYMBOL, 247), + (24, HUFFMAN_EMIT_SYMBOL, 247), + (31, HUFFMAN_EMIT_SYMBOL, 247), + (41, HUFFMAN_EMIT_SYMBOL, 247), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 247), + (3, HUFFMAN_EMIT_SYMBOL, 248), + (6, HUFFMAN_EMIT_SYMBOL, 248), + (10, HUFFMAN_EMIT_SYMBOL, 248), + (15, HUFFMAN_EMIT_SYMBOL, 248), + (24, HUFFMAN_EMIT_SYMBOL, 248), + (31, HUFFMAN_EMIT_SYMBOL, 248), + (41, HUFFMAN_EMIT_SYMBOL, 248), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 248), + + # Node 220 + (2, HUFFMAN_EMIT_SYMBOL, 250), + (9, HUFFMAN_EMIT_SYMBOL, 250), + (23, HUFFMAN_EMIT_SYMBOL, 250), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 250), + (2, HUFFMAN_EMIT_SYMBOL, 251), + (9, HUFFMAN_EMIT_SYMBOL, 251), + (23, HUFFMAN_EMIT_SYMBOL, 251), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 251), + (2, HUFFMAN_EMIT_SYMBOL, 252), + (9, HUFFMAN_EMIT_SYMBOL, 252), + (23, HUFFMAN_EMIT_SYMBOL, 252), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 252), + (2, HUFFMAN_EMIT_SYMBOL, 253), + (9, HUFFMAN_EMIT_SYMBOL, 253), + (23, HUFFMAN_EMIT_SYMBOL, 253), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 253), + + # Node 221 + (3, HUFFMAN_EMIT_SYMBOL, 250), + (6, HUFFMAN_EMIT_SYMBOL, 250), + (10, HUFFMAN_EMIT_SYMBOL, 250), + (15, HUFFMAN_EMIT_SYMBOL, 250), + (24, HUFFMAN_EMIT_SYMBOL, 250), + (31, HUFFMAN_EMIT_SYMBOL, 250), + (41, HUFFMAN_EMIT_SYMBOL, 250), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 250), + (3, HUFFMAN_EMIT_SYMBOL, 251), + (6, HUFFMAN_EMIT_SYMBOL, 251), + (10, 
HUFFMAN_EMIT_SYMBOL, 251), + (15, HUFFMAN_EMIT_SYMBOL, 251), + (24, HUFFMAN_EMIT_SYMBOL, 251), + (31, HUFFMAN_EMIT_SYMBOL, 251), + (41, HUFFMAN_EMIT_SYMBOL, 251), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 251), + + # Node 222 + (3, HUFFMAN_EMIT_SYMBOL, 252), + (6, HUFFMAN_EMIT_SYMBOL, 252), + (10, HUFFMAN_EMIT_SYMBOL, 252), + (15, HUFFMAN_EMIT_SYMBOL, 252), + (24, HUFFMAN_EMIT_SYMBOL, 252), + (31, HUFFMAN_EMIT_SYMBOL, 252), + (41, HUFFMAN_EMIT_SYMBOL, 252), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 252), + (3, HUFFMAN_EMIT_SYMBOL, 253), + (6, HUFFMAN_EMIT_SYMBOL, 253), + (10, HUFFMAN_EMIT_SYMBOL, 253), + (15, HUFFMAN_EMIT_SYMBOL, 253), + (24, HUFFMAN_EMIT_SYMBOL, 253), + (31, HUFFMAN_EMIT_SYMBOL, 253), + (41, HUFFMAN_EMIT_SYMBOL, 253), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 253), + + # Node 223 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 254), + (227, 0, 0), + (229, 0, 0), + (230, 0, 0), + (233, 0, 0), + (234, 0, 0), + (236, 0, 0), + (237, 0, 0), + (241, 0, 0), + (242, 0, 0), + (244, 0, 0), + (245, 0, 0), + (248, 0, 0), + (249, 0, 0), + (251, 0, 0), + (252, 0, 0), + + # Node 224 + (1, HUFFMAN_EMIT_SYMBOL, 254), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 254), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 2), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 3), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 4), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 5), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 6), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 7), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 8), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 11), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 12), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 14), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 15), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 16), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 17), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 18), + + # Node 225 + (2, HUFFMAN_EMIT_SYMBOL, 254), + (9, HUFFMAN_EMIT_SYMBOL, 254), + (23, HUFFMAN_EMIT_SYMBOL, 254), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 254), + (1, HUFFMAN_EMIT_SYMBOL, 2), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 2), + (1, HUFFMAN_EMIT_SYMBOL, 3), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 3), + (1, HUFFMAN_EMIT_SYMBOL, 4), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 4), + (1, HUFFMAN_EMIT_SYMBOL, 5), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 5), + (1, HUFFMAN_EMIT_SYMBOL, 6), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 6), + (1, HUFFMAN_EMIT_SYMBOL, 7), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 7), + + # Node 226 + (3, HUFFMAN_EMIT_SYMBOL, 254), + (6, HUFFMAN_EMIT_SYMBOL, 254), + (10, HUFFMAN_EMIT_SYMBOL, 254), + (15, HUFFMAN_EMIT_SYMBOL, 254), + (24, HUFFMAN_EMIT_SYMBOL, 254), + (31, HUFFMAN_EMIT_SYMBOL, 254), + (41, HUFFMAN_EMIT_SYMBOL, 254), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 254), + (2, HUFFMAN_EMIT_SYMBOL, 2), + (9, HUFFMAN_EMIT_SYMBOL, 2), + (23, HUFFMAN_EMIT_SYMBOL, 2), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 2), + (2, HUFFMAN_EMIT_SYMBOL, 3), + (9, HUFFMAN_EMIT_SYMBOL, 3), + (23, HUFFMAN_EMIT_SYMBOL, 3), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 3), + + # Node 227 + (3, HUFFMAN_EMIT_SYMBOL, 2), + (6, HUFFMAN_EMIT_SYMBOL, 2), + (10, HUFFMAN_EMIT_SYMBOL, 2), + (15, HUFFMAN_EMIT_SYMBOL, 2), + (24, HUFFMAN_EMIT_SYMBOL, 2), + (31, HUFFMAN_EMIT_SYMBOL, 2), + (41, HUFFMAN_EMIT_SYMBOL, 2), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 2), + (3, HUFFMAN_EMIT_SYMBOL, 3), + (6, HUFFMAN_EMIT_SYMBOL, 3), + (10, HUFFMAN_EMIT_SYMBOL, 3), + (15, 
HUFFMAN_EMIT_SYMBOL, 3), + (24, HUFFMAN_EMIT_SYMBOL, 3), + (31, HUFFMAN_EMIT_SYMBOL, 3), + (41, HUFFMAN_EMIT_SYMBOL, 3), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 3), + + # Node 228 + (2, HUFFMAN_EMIT_SYMBOL, 4), + (9, HUFFMAN_EMIT_SYMBOL, 4), + (23, HUFFMAN_EMIT_SYMBOL, 4), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 4), + (2, HUFFMAN_EMIT_SYMBOL, 5), + (9, HUFFMAN_EMIT_SYMBOL, 5), + (23, HUFFMAN_EMIT_SYMBOL, 5), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 5), + (2, HUFFMAN_EMIT_SYMBOL, 6), + (9, HUFFMAN_EMIT_SYMBOL, 6), + (23, HUFFMAN_EMIT_SYMBOL, 6), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 6), + (2, HUFFMAN_EMIT_SYMBOL, 7), + (9, HUFFMAN_EMIT_SYMBOL, 7), + (23, HUFFMAN_EMIT_SYMBOL, 7), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 7), + + # Node 229 + (3, HUFFMAN_EMIT_SYMBOL, 4), + (6, HUFFMAN_EMIT_SYMBOL, 4), + (10, HUFFMAN_EMIT_SYMBOL, 4), + (15, HUFFMAN_EMIT_SYMBOL, 4), + (24, HUFFMAN_EMIT_SYMBOL, 4), + (31, HUFFMAN_EMIT_SYMBOL, 4), + (41, HUFFMAN_EMIT_SYMBOL, 4), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 4), + (3, HUFFMAN_EMIT_SYMBOL, 5), + (6, HUFFMAN_EMIT_SYMBOL, 5), + (10, HUFFMAN_EMIT_SYMBOL, 5), + (15, HUFFMAN_EMIT_SYMBOL, 5), + (24, HUFFMAN_EMIT_SYMBOL, 5), + (31, HUFFMAN_EMIT_SYMBOL, 5), + (41, HUFFMAN_EMIT_SYMBOL, 5), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 5), + + # Node 230 + (3, HUFFMAN_EMIT_SYMBOL, 6), + (6, HUFFMAN_EMIT_SYMBOL, 6), + (10, HUFFMAN_EMIT_SYMBOL, 6), + (15, HUFFMAN_EMIT_SYMBOL, 6), + (24, HUFFMAN_EMIT_SYMBOL, 6), + (31, HUFFMAN_EMIT_SYMBOL, 6), + (41, HUFFMAN_EMIT_SYMBOL, 6), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 6), + (3, HUFFMAN_EMIT_SYMBOL, 7), + (6, HUFFMAN_EMIT_SYMBOL, 7), + (10, HUFFMAN_EMIT_SYMBOL, 7), + (15, HUFFMAN_EMIT_SYMBOL, 7), + (24, HUFFMAN_EMIT_SYMBOL, 7), + (31, HUFFMAN_EMIT_SYMBOL, 7), + (41, HUFFMAN_EMIT_SYMBOL, 7), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 7), + + # Node 231 + (1, HUFFMAN_EMIT_SYMBOL, 8), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 8), + (1, HUFFMAN_EMIT_SYMBOL, 11), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 11), + (1, HUFFMAN_EMIT_SYMBOL, 12), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 12), + (1, HUFFMAN_EMIT_SYMBOL, 14), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 14), + (1, HUFFMAN_EMIT_SYMBOL, 15), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 15), + (1, HUFFMAN_EMIT_SYMBOL, 16), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 16), + (1, HUFFMAN_EMIT_SYMBOL, 17), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 17), + (1, HUFFMAN_EMIT_SYMBOL, 18), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 18), + + # Node 232 + (2, HUFFMAN_EMIT_SYMBOL, 8), + (9, HUFFMAN_EMIT_SYMBOL, 8), + (23, HUFFMAN_EMIT_SYMBOL, 8), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 8), + (2, HUFFMAN_EMIT_SYMBOL, 11), + (9, HUFFMAN_EMIT_SYMBOL, 11), + (23, HUFFMAN_EMIT_SYMBOL, 11), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 11), + (2, HUFFMAN_EMIT_SYMBOL, 12), + (9, HUFFMAN_EMIT_SYMBOL, 12), + (23, HUFFMAN_EMIT_SYMBOL, 12), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 12), + (2, HUFFMAN_EMIT_SYMBOL, 14), + (9, HUFFMAN_EMIT_SYMBOL, 14), + (23, HUFFMAN_EMIT_SYMBOL, 14), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 14), + + # Node 233 + (3, HUFFMAN_EMIT_SYMBOL, 8), + (6, HUFFMAN_EMIT_SYMBOL, 8), + (10, HUFFMAN_EMIT_SYMBOL, 8), + (15, HUFFMAN_EMIT_SYMBOL, 8), + (24, HUFFMAN_EMIT_SYMBOL, 8), + (31, HUFFMAN_EMIT_SYMBOL, 8), + (41, HUFFMAN_EMIT_SYMBOL, 8), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 8), + (3, HUFFMAN_EMIT_SYMBOL, 11), + (6, HUFFMAN_EMIT_SYMBOL, 11), + (10, 
HUFFMAN_EMIT_SYMBOL, 11), + (15, HUFFMAN_EMIT_SYMBOL, 11), + (24, HUFFMAN_EMIT_SYMBOL, 11), + (31, HUFFMAN_EMIT_SYMBOL, 11), + (41, HUFFMAN_EMIT_SYMBOL, 11), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 11), + + # Node 234 + (3, HUFFMAN_EMIT_SYMBOL, 12), + (6, HUFFMAN_EMIT_SYMBOL, 12), + (10, HUFFMAN_EMIT_SYMBOL, 12), + (15, HUFFMAN_EMIT_SYMBOL, 12), + (24, HUFFMAN_EMIT_SYMBOL, 12), + (31, HUFFMAN_EMIT_SYMBOL, 12), + (41, HUFFMAN_EMIT_SYMBOL, 12), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 12), + (3, HUFFMAN_EMIT_SYMBOL, 14), + (6, HUFFMAN_EMIT_SYMBOL, 14), + (10, HUFFMAN_EMIT_SYMBOL, 14), + (15, HUFFMAN_EMIT_SYMBOL, 14), + (24, HUFFMAN_EMIT_SYMBOL, 14), + (31, HUFFMAN_EMIT_SYMBOL, 14), + (41, HUFFMAN_EMIT_SYMBOL, 14), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 14), + + # Node 235 + (2, HUFFMAN_EMIT_SYMBOL, 15), + (9, HUFFMAN_EMIT_SYMBOL, 15), + (23, HUFFMAN_EMIT_SYMBOL, 15), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 15), + (2, HUFFMAN_EMIT_SYMBOL, 16), + (9, HUFFMAN_EMIT_SYMBOL, 16), + (23, HUFFMAN_EMIT_SYMBOL, 16), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 16), + (2, HUFFMAN_EMIT_SYMBOL, 17), + (9, HUFFMAN_EMIT_SYMBOL, 17), + (23, HUFFMAN_EMIT_SYMBOL, 17), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 17), + (2, HUFFMAN_EMIT_SYMBOL, 18), + (9, HUFFMAN_EMIT_SYMBOL, 18), + (23, HUFFMAN_EMIT_SYMBOL, 18), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 18), + + # Node 236 + (3, HUFFMAN_EMIT_SYMBOL, 15), + (6, HUFFMAN_EMIT_SYMBOL, 15), + (10, HUFFMAN_EMIT_SYMBOL, 15), + (15, HUFFMAN_EMIT_SYMBOL, 15), + (24, HUFFMAN_EMIT_SYMBOL, 15), + (31, HUFFMAN_EMIT_SYMBOL, 15), + (41, HUFFMAN_EMIT_SYMBOL, 15), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 15), + (3, HUFFMAN_EMIT_SYMBOL, 16), + (6, HUFFMAN_EMIT_SYMBOL, 16), + (10, HUFFMAN_EMIT_SYMBOL, 16), + (15, HUFFMAN_EMIT_SYMBOL, 16), + (24, HUFFMAN_EMIT_SYMBOL, 16), + (31, HUFFMAN_EMIT_SYMBOL, 16), + (41, HUFFMAN_EMIT_SYMBOL, 16), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 16), + + # Node 237 + (3, HUFFMAN_EMIT_SYMBOL, 17), + (6, HUFFMAN_EMIT_SYMBOL, 17), + (10, HUFFMAN_EMIT_SYMBOL, 17), + (15, HUFFMAN_EMIT_SYMBOL, 17), + (24, HUFFMAN_EMIT_SYMBOL, 17), + (31, HUFFMAN_EMIT_SYMBOL, 17), + (41, HUFFMAN_EMIT_SYMBOL, 17), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 17), + (3, HUFFMAN_EMIT_SYMBOL, 18), + (6, HUFFMAN_EMIT_SYMBOL, 18), + (10, HUFFMAN_EMIT_SYMBOL, 18), + (15, HUFFMAN_EMIT_SYMBOL, 18), + (24, HUFFMAN_EMIT_SYMBOL, 18), + (31, HUFFMAN_EMIT_SYMBOL, 18), + (41, HUFFMAN_EMIT_SYMBOL, 18), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 18), + + # Node 238 + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 19), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 20), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 21), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 23), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 24), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 25), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 26), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 27), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 28), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 29), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 30), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 31), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 127), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 220), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 249), + (253, 0, 0), + + # Node 239 + (1, HUFFMAN_EMIT_SYMBOL, 19), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 19), + (1, HUFFMAN_EMIT_SYMBOL, 20), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 20), + (1, 
HUFFMAN_EMIT_SYMBOL, 21), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 21), + (1, HUFFMAN_EMIT_SYMBOL, 23), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 23), + (1, HUFFMAN_EMIT_SYMBOL, 24), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 24), + (1, HUFFMAN_EMIT_SYMBOL, 25), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 25), + (1, HUFFMAN_EMIT_SYMBOL, 26), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 26), + (1, HUFFMAN_EMIT_SYMBOL, 27), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 27), + + # Node 240 + (2, HUFFMAN_EMIT_SYMBOL, 19), + (9, HUFFMAN_EMIT_SYMBOL, 19), + (23, HUFFMAN_EMIT_SYMBOL, 19), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 19), + (2, HUFFMAN_EMIT_SYMBOL, 20), + (9, HUFFMAN_EMIT_SYMBOL, 20), + (23, HUFFMAN_EMIT_SYMBOL, 20), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 20), + (2, HUFFMAN_EMIT_SYMBOL, 21), + (9, HUFFMAN_EMIT_SYMBOL, 21), + (23, HUFFMAN_EMIT_SYMBOL, 21), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 21), + (2, HUFFMAN_EMIT_SYMBOL, 23), + (9, HUFFMAN_EMIT_SYMBOL, 23), + (23, HUFFMAN_EMIT_SYMBOL, 23), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 23), + + # Node 241 + (3, HUFFMAN_EMIT_SYMBOL, 19), + (6, HUFFMAN_EMIT_SYMBOL, 19), + (10, HUFFMAN_EMIT_SYMBOL, 19), + (15, HUFFMAN_EMIT_SYMBOL, 19), + (24, HUFFMAN_EMIT_SYMBOL, 19), + (31, HUFFMAN_EMIT_SYMBOL, 19), + (41, HUFFMAN_EMIT_SYMBOL, 19), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 19), + (3, HUFFMAN_EMIT_SYMBOL, 20), + (6, HUFFMAN_EMIT_SYMBOL, 20), + (10, HUFFMAN_EMIT_SYMBOL, 20), + (15, HUFFMAN_EMIT_SYMBOL, 20), + (24, HUFFMAN_EMIT_SYMBOL, 20), + (31, HUFFMAN_EMIT_SYMBOL, 20), + (41, HUFFMAN_EMIT_SYMBOL, 20), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 20), + + # Node 242 + (3, HUFFMAN_EMIT_SYMBOL, 21), + (6, HUFFMAN_EMIT_SYMBOL, 21), + (10, HUFFMAN_EMIT_SYMBOL, 21), + (15, HUFFMAN_EMIT_SYMBOL, 21), + (24, HUFFMAN_EMIT_SYMBOL, 21), + (31, HUFFMAN_EMIT_SYMBOL, 21), + (41, HUFFMAN_EMIT_SYMBOL, 21), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 21), + (3, HUFFMAN_EMIT_SYMBOL, 23), + (6, HUFFMAN_EMIT_SYMBOL, 23), + (10, HUFFMAN_EMIT_SYMBOL, 23), + (15, HUFFMAN_EMIT_SYMBOL, 23), + (24, HUFFMAN_EMIT_SYMBOL, 23), + (31, HUFFMAN_EMIT_SYMBOL, 23), + (41, HUFFMAN_EMIT_SYMBOL, 23), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 23), + + # Node 243 + (2, HUFFMAN_EMIT_SYMBOL, 24), + (9, HUFFMAN_EMIT_SYMBOL, 24), + (23, HUFFMAN_EMIT_SYMBOL, 24), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 24), + (2, HUFFMAN_EMIT_SYMBOL, 25), + (9, HUFFMAN_EMIT_SYMBOL, 25), + (23, HUFFMAN_EMIT_SYMBOL, 25), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 25), + (2, HUFFMAN_EMIT_SYMBOL, 26), + (9, HUFFMAN_EMIT_SYMBOL, 26), + (23, HUFFMAN_EMIT_SYMBOL, 26), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 26), + (2, HUFFMAN_EMIT_SYMBOL, 27), + (9, HUFFMAN_EMIT_SYMBOL, 27), + (23, HUFFMAN_EMIT_SYMBOL, 27), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 27), + + # Node 244 + (3, HUFFMAN_EMIT_SYMBOL, 24), + (6, HUFFMAN_EMIT_SYMBOL, 24), + (10, HUFFMAN_EMIT_SYMBOL, 24), + (15, HUFFMAN_EMIT_SYMBOL, 24), + (24, HUFFMAN_EMIT_SYMBOL, 24), + (31, HUFFMAN_EMIT_SYMBOL, 24), + (41, HUFFMAN_EMIT_SYMBOL, 24), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 24), + (3, HUFFMAN_EMIT_SYMBOL, 25), + (6, HUFFMAN_EMIT_SYMBOL, 25), + (10, HUFFMAN_EMIT_SYMBOL, 25), + (15, HUFFMAN_EMIT_SYMBOL, 25), + (24, HUFFMAN_EMIT_SYMBOL, 25), + (31, HUFFMAN_EMIT_SYMBOL, 25), + (41, HUFFMAN_EMIT_SYMBOL, 25), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 25), + + # Node 245 + (3, HUFFMAN_EMIT_SYMBOL, 26), + (6, HUFFMAN_EMIT_SYMBOL, 26), + (10, 
HUFFMAN_EMIT_SYMBOL, 26), + (15, HUFFMAN_EMIT_SYMBOL, 26), + (24, HUFFMAN_EMIT_SYMBOL, 26), + (31, HUFFMAN_EMIT_SYMBOL, 26), + (41, HUFFMAN_EMIT_SYMBOL, 26), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 26), + (3, HUFFMAN_EMIT_SYMBOL, 27), + (6, HUFFMAN_EMIT_SYMBOL, 27), + (10, HUFFMAN_EMIT_SYMBOL, 27), + (15, HUFFMAN_EMIT_SYMBOL, 27), + (24, HUFFMAN_EMIT_SYMBOL, 27), + (31, HUFFMAN_EMIT_SYMBOL, 27), + (41, HUFFMAN_EMIT_SYMBOL, 27), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 27), + + # Node 246 + (1, HUFFMAN_EMIT_SYMBOL, 28), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 28), + (1, HUFFMAN_EMIT_SYMBOL, 29), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 29), + (1, HUFFMAN_EMIT_SYMBOL, 30), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 30), + (1, HUFFMAN_EMIT_SYMBOL, 31), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 31), + (1, HUFFMAN_EMIT_SYMBOL, 127), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 127), + (1, HUFFMAN_EMIT_SYMBOL, 220), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 220), + (1, HUFFMAN_EMIT_SYMBOL, 249), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 249), + (254, 0, 0), + (255, 0, 0), + + # Node 247 + (2, HUFFMAN_EMIT_SYMBOL, 28), + (9, HUFFMAN_EMIT_SYMBOL, 28), + (23, HUFFMAN_EMIT_SYMBOL, 28), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 28), + (2, HUFFMAN_EMIT_SYMBOL, 29), + (9, HUFFMAN_EMIT_SYMBOL, 29), + (23, HUFFMAN_EMIT_SYMBOL, 29), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 29), + (2, HUFFMAN_EMIT_SYMBOL, 30), + (9, HUFFMAN_EMIT_SYMBOL, 30), + (23, HUFFMAN_EMIT_SYMBOL, 30), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 30), + (2, HUFFMAN_EMIT_SYMBOL, 31), + (9, HUFFMAN_EMIT_SYMBOL, 31), + (23, HUFFMAN_EMIT_SYMBOL, 31), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 31), + + # Node 248 + (3, HUFFMAN_EMIT_SYMBOL, 28), + (6, HUFFMAN_EMIT_SYMBOL, 28), + (10, HUFFMAN_EMIT_SYMBOL, 28), + (15, HUFFMAN_EMIT_SYMBOL, 28), + (24, HUFFMAN_EMIT_SYMBOL, 28), + (31, HUFFMAN_EMIT_SYMBOL, 28), + (41, HUFFMAN_EMIT_SYMBOL, 28), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 28), + (3, HUFFMAN_EMIT_SYMBOL, 29), + (6, HUFFMAN_EMIT_SYMBOL, 29), + (10, HUFFMAN_EMIT_SYMBOL, 29), + (15, HUFFMAN_EMIT_SYMBOL, 29), + (24, HUFFMAN_EMIT_SYMBOL, 29), + (31, HUFFMAN_EMIT_SYMBOL, 29), + (41, HUFFMAN_EMIT_SYMBOL, 29), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 29), + + # Node 249 + (3, HUFFMAN_EMIT_SYMBOL, 30), + (6, HUFFMAN_EMIT_SYMBOL, 30), + (10, HUFFMAN_EMIT_SYMBOL, 30), + (15, HUFFMAN_EMIT_SYMBOL, 30), + (24, HUFFMAN_EMIT_SYMBOL, 30), + (31, HUFFMAN_EMIT_SYMBOL, 30), + (41, HUFFMAN_EMIT_SYMBOL, 30), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 30), + (3, HUFFMAN_EMIT_SYMBOL, 31), + (6, HUFFMAN_EMIT_SYMBOL, 31), + (10, HUFFMAN_EMIT_SYMBOL, 31), + (15, HUFFMAN_EMIT_SYMBOL, 31), + (24, HUFFMAN_EMIT_SYMBOL, 31), + (31, HUFFMAN_EMIT_SYMBOL, 31), + (41, HUFFMAN_EMIT_SYMBOL, 31), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 31), + + # Node 250 + (2, HUFFMAN_EMIT_SYMBOL, 127), + (9, HUFFMAN_EMIT_SYMBOL, 127), + (23, HUFFMAN_EMIT_SYMBOL, 127), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 127), + (2, HUFFMAN_EMIT_SYMBOL, 220), + (9, HUFFMAN_EMIT_SYMBOL, 220), + (23, HUFFMAN_EMIT_SYMBOL, 220), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 220), + (2, HUFFMAN_EMIT_SYMBOL, 249), + (9, HUFFMAN_EMIT_SYMBOL, 249), + (23, HUFFMAN_EMIT_SYMBOL, 249), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 249), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 10), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 13), + (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 22), + (0, HUFFMAN_FAIL, 0), + + # 
Node 251 + (3, HUFFMAN_EMIT_SYMBOL, 127), + (6, HUFFMAN_EMIT_SYMBOL, 127), + (10, HUFFMAN_EMIT_SYMBOL, 127), + (15, HUFFMAN_EMIT_SYMBOL, 127), + (24, HUFFMAN_EMIT_SYMBOL, 127), + (31, HUFFMAN_EMIT_SYMBOL, 127), + (41, HUFFMAN_EMIT_SYMBOL, 127), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 127), + (3, HUFFMAN_EMIT_SYMBOL, 220), + (6, HUFFMAN_EMIT_SYMBOL, 220), + (10, HUFFMAN_EMIT_SYMBOL, 220), + (15, HUFFMAN_EMIT_SYMBOL, 220), + (24, HUFFMAN_EMIT_SYMBOL, 220), + (31, HUFFMAN_EMIT_SYMBOL, 220), + (41, HUFFMAN_EMIT_SYMBOL, 220), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 220), + + # Node 252 + (3, HUFFMAN_EMIT_SYMBOL, 249), + (6, HUFFMAN_EMIT_SYMBOL, 249), + (10, HUFFMAN_EMIT_SYMBOL, 249), + (15, HUFFMAN_EMIT_SYMBOL, 249), + (24, HUFFMAN_EMIT_SYMBOL, 249), + (31, HUFFMAN_EMIT_SYMBOL, 249), + (41, HUFFMAN_EMIT_SYMBOL, 249), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 249), + (1, HUFFMAN_EMIT_SYMBOL, 10), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 10), + (1, HUFFMAN_EMIT_SYMBOL, 13), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 13), + (1, HUFFMAN_EMIT_SYMBOL, 22), + (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 22), + (0, HUFFMAN_FAIL, 0), + (0, HUFFMAN_FAIL, 0), + + # Node 253 + (2, HUFFMAN_EMIT_SYMBOL, 10), + (9, HUFFMAN_EMIT_SYMBOL, 10), + (23, HUFFMAN_EMIT_SYMBOL, 10), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 10), + (2, HUFFMAN_EMIT_SYMBOL, 13), + (9, HUFFMAN_EMIT_SYMBOL, 13), + (23, HUFFMAN_EMIT_SYMBOL, 13), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 13), + (2, HUFFMAN_EMIT_SYMBOL, 22), + (9, HUFFMAN_EMIT_SYMBOL, 22), + (23, HUFFMAN_EMIT_SYMBOL, 22), + (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 22), + (0, HUFFMAN_FAIL, 0), + (0, HUFFMAN_FAIL, 0), + (0, HUFFMAN_FAIL, 0), + (0, HUFFMAN_FAIL, 0), + + # Node 254 + (3, HUFFMAN_EMIT_SYMBOL, 10), + (6, HUFFMAN_EMIT_SYMBOL, 10), + (10, HUFFMAN_EMIT_SYMBOL, 10), + (15, HUFFMAN_EMIT_SYMBOL, 10), + (24, HUFFMAN_EMIT_SYMBOL, 10), + (31, HUFFMAN_EMIT_SYMBOL, 10), + (41, HUFFMAN_EMIT_SYMBOL, 10), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 10), + (3, HUFFMAN_EMIT_SYMBOL, 13), + (6, HUFFMAN_EMIT_SYMBOL, 13), + (10, HUFFMAN_EMIT_SYMBOL, 13), + (15, HUFFMAN_EMIT_SYMBOL, 13), + (24, HUFFMAN_EMIT_SYMBOL, 13), + (31, HUFFMAN_EMIT_SYMBOL, 13), + (41, HUFFMAN_EMIT_SYMBOL, 13), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 13), + + # Node 255 + (3, HUFFMAN_EMIT_SYMBOL, 22), + (6, HUFFMAN_EMIT_SYMBOL, 22), + (10, HUFFMAN_EMIT_SYMBOL, 22), + (15, HUFFMAN_EMIT_SYMBOL, 22), + (24, HUFFMAN_EMIT_SYMBOL, 22), + (31, HUFFMAN_EMIT_SYMBOL, 22), + (41, HUFFMAN_EMIT_SYMBOL, 22), + (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 22), + (0, HUFFMAN_FAIL, 0), + (0, HUFFMAN_FAIL, 0), + (0, HUFFMAN_FAIL, 0), + (0, HUFFMAN_FAIL, 0), + (0, HUFFMAN_FAIL, 0), + (0, HUFFMAN_FAIL, 0), + (0, HUFFMAN_FAIL, 0), + (0, HUFFMAN_FAIL, 0), +] diff --git a/py3/lib/python3.6/site-packages/hpack/struct.py b/py3/lib/python3.6/site-packages/hpack/struct.py new file mode 100644 index 0000000..e860cd7 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack/struct.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +""" +hpack/struct +~~~~~~~~~~~~ + +Contains structures for representing header fields with associated metadata. +""" + + +class HeaderTuple(tuple): + """ + A data structure that stores a single header field. + + HTTP headers can be thought of as tuples of ``(field name, field value)``. + A single header block is a sequence of such tuples. 
+ + In HTTP/2, however, certain bits of additional information are required for + compressing these headers: in particular, whether the header field can be + safely added to the HPACK compression context. + + This class stores a header that can be added to the compression context. In + all other ways it behaves exactly like a tuple. + """ + __slots__ = () + + indexable = True + + def __new__(_cls, *args): + return tuple.__new__(_cls, args) + + +class NeverIndexedHeaderTuple(HeaderTuple): + """ + A data structure that stores a single header field that cannot be added to + a HTTP/2 header compression context. + """ + __slots__ = () + + indexable = False diff --git a/py3/lib/python3.6/site-packages/hpack/table.py b/py3/lib/python3.6/site-packages/hpack/table.py new file mode 100644 index 0000000..9a89c72 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hpack/table.py @@ -0,0 +1,215 @@ +# -*- coding: utf-8 -*- +# flake8: noqa +from collections import deque +import logging + +from .exceptions import InvalidTableIndex + +log = logging.getLogger(__name__) + + +def table_entry_size(name, value): + """ + Calculates the size of a single entry + + This size is mostly irrelevant to us and defined + specifically to accommodate memory management for + lower level implementations. The 32 extra bytes are + considered the "maximum" overhead that would be + required to represent each entry in the table. + + See RFC7541 Section 4.1 + """ + return 32 + len(name) + len(value) + + +class HeaderTable(object): + """ + Implements the combined static and dynamic header table + + The name and value arguments for all the functions + should ONLY be byte strings (b'') however this is not + strictly enforced in the interface. + + See RFC7541 Section 2.3 + """ + #: Default maximum size of the dynamic table. See + #: RFC7540 Section 6.5.2. + DEFAULT_SIZE = 4096 + + #: Constant list of static headers. 
See RFC7541 Section + #: 2.3.1 and Appendix A + STATIC_TABLE = ( + (b':authority' , b'' ), # noqa + (b':method' , b'GET' ), # noqa + (b':method' , b'POST' ), # noqa + (b':path' , b'/' ), # noqa + (b':path' , b'/index.html' ), # noqa + (b':scheme' , b'http' ), # noqa + (b':scheme' , b'https' ), # noqa + (b':status' , b'200' ), # noqa + (b':status' , b'204' ), # noqa + (b':status' , b'206' ), # noqa + (b':status' , b'304' ), # noqa + (b':status' , b'400' ), # noqa + (b':status' , b'404' ), # noqa + (b':status' , b'500' ), # noqa + (b'accept-charset' , b'' ), # noqa + (b'accept-encoding' , b'gzip, deflate'), # noqa + (b'accept-language' , b'' ), # noqa + (b'accept-ranges' , b'' ), # noqa + (b'accept' , b'' ), # noqa + (b'access-control-allow-origin' , b'' ), # noqa + (b'age' , b'' ), # noqa + (b'allow' , b'' ), # noqa + (b'authorization' , b'' ), # noqa + (b'cache-control' , b'' ), # noqa + (b'content-disposition' , b'' ), # noqa + (b'content-encoding' , b'' ), # noqa + (b'content-language' , b'' ), # noqa + (b'content-length' , b'' ), # noqa + (b'content-location' , b'' ), # noqa + (b'content-range' , b'' ), # noqa + (b'content-type' , b'' ), # noqa + (b'cookie' , b'' ), # noqa + (b'date' , b'' ), # noqa + (b'etag' , b'' ), # noqa + (b'expect' , b'' ), # noqa + (b'expires' , b'' ), # noqa + (b'from' , b'' ), # noqa + (b'host' , b'' ), # noqa + (b'if-match' , b'' ), # noqa + (b'if-modified-since' , b'' ), # noqa + (b'if-none-match' , b'' ), # noqa + (b'if-range' , b'' ), # noqa + (b'if-unmodified-since' , b'' ), # noqa + (b'last-modified' , b'' ), # noqa + (b'link' , b'' ), # noqa + (b'location' , b'' ), # noqa + (b'max-forwards' , b'' ), # noqa + (b'proxy-authenticate' , b'' ), # noqa + (b'proxy-authorization' , b'' ), # noqa + (b'range' , b'' ), # noqa + (b'referer' , b'' ), # noqa + (b'refresh' , b'' ), # noqa + (b'retry-after' , b'' ), # noqa + (b'server' , b'' ), # noqa + (b'set-cookie' , b'' ), # noqa + (b'strict-transport-security' , b'' ), # noqa + (b'transfer-encoding' , b'' ), # noqa + (b'user-agent' , b'' ), # noqa + (b'vary' , b'' ), # noqa + (b'via' , b'' ), # noqa + (b'www-authenticate' , b'' ), # noqa + ) # noqa + + STATIC_TABLE_LENGTH = len(STATIC_TABLE) + + def __init__(self): + self._maxsize = HeaderTable.DEFAULT_SIZE + self._current_size = 0 + self.resized = False + self.dynamic_entries = deque() + + def get_by_index(self, index): + """ + Returns the entry specified by index + + Note that the table is 1-based ie an index of 0 is + invalid. This is due to the fact that a zero value + index signals that a completely unindexed header + follows. + + The entry will either be from the static table or + the dynamic table depending on the value of index. + """ + original_index = index + index -= 1 + if 0 <= index: + if index < HeaderTable.STATIC_TABLE_LENGTH: + return HeaderTable.STATIC_TABLE[index] + + index -= HeaderTable.STATIC_TABLE_LENGTH + if index < len(self.dynamic_entries): + return self.dynamic_entries[index] + + raise InvalidTableIndex("Invalid table index %d" % original_index) + + def __repr__(self): + return "HeaderTable(%d, %s, %r)" % ( + self._maxsize, + self.resized, + self.dynamic_entries + ) + + def add(self, name, value): + """ + Adds a new entry to the table + + We reduce the table size if the entry will make the + table size greater than maxsize. 
+ """ + # We just clear the table if the entry is too big + size = table_entry_size(name, value) + if size > self._maxsize: + self.dynamic_entries.clear() + self._current_size = 0 + else: + # Add new entry + self.dynamic_entries.appendleft((name, value)) + self._current_size += size + self._shrink() + + def search(self, name, value): + """ + Searches the table for the entry specified by name + and value + + Returns one of the following: + - ``None``, no match at all + - ``(index, name, None)`` for partial matches on name only. + - ``(index, name, value)`` for perfect matches. + """ + offset = HeaderTable.STATIC_TABLE_LENGTH + 1 + partial = None + for (i, (n, v)) in enumerate(HeaderTable.STATIC_TABLE): + if n == name: + if v == value: + return i + 1, n, v + elif partial is None: + partial = (i + 1, n, None) + for (i, (n, v)) in enumerate(self.dynamic_entries): + if n == name: + if v == value: + return i + offset, n, v + elif partial is None: + partial = (i + offset, n, None) + return partial + + @property + def maxsize(self): + return self._maxsize + + @maxsize.setter + def maxsize(self, newmax): + newmax = int(newmax) + log.debug("Resizing header table to %d from %d", newmax, self._maxsize) + oldmax = self._maxsize + self._maxsize = newmax + self.resized = (newmax != oldmax) + if newmax <= 0: + self.dynamic_entries.clear() + self._current_size = 0 + elif oldmax > newmax: + self._shrink() + + def _shrink(self): + """ + Shrinks the dynamic table to be at or below maxsize + """ + cursize = self._current_size + while cursize > self._maxsize: + name, value = self.dynamic_entries.pop() + cursize -= table_entry_size(name, value) + log.debug("Evicting %s: %s from the header table", name, value) + self._current_size = cursize diff --git a/py3/lib/python3.6/site-packages/hypercorn/__about__.py b/py3/lib/python3.6/site-packages/hypercorn/__about__.py new file mode 100644 index 0000000..6b27eee --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/__about__.py @@ -0,0 +1 @@ +__version__ = "0.5.4" diff --git a/py3/lib/python3.6/site-packages/hypercorn/__init__.py b/py3/lib/python3.6/site-packages/hypercorn/__init__.py new file mode 100644 index 0000000..1e68d98 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/__init__.py @@ -0,0 +1,5 @@ +from .__about__ import __version__ +from .asyncio.run import run_single +from .config import Config + +__all__ = ("__version__", "Config", "run_single") diff --git a/py3/lib/python3.6/site-packages/hypercorn/__main__.py b/py3/lib/python3.6/site-packages/hypercorn/__main__.py new file mode 100644 index 0000000..412afc2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/__main__.py @@ -0,0 +1,181 @@ +import argparse +import ssl +import sys +import warnings +from typing import List, Optional + +from .config import Config +from .run import run + +sentinel = object() + + +def _load_config(config_path: Optional[str]) -> Config: + if config_path is None: + return Config() + elif config_path.startswith("python:"): + return Config.from_pyfile(config_path[len("python:") :]) + else: + return Config.from_toml(config_path) + + +def main(sys_args: Optional[List[str]] = None) -> None: + parser = argparse.ArgumentParser() + parser.add_argument( + "application", help="The application to dispatch to as path.to.module:instance.path" + ) + parser.add_argument( + "--access-log", + help="The target location for the access log, use `-` for stdout", + default=sentinel, + ) + parser.add_argument( + "--access-logformat", + help="The log format for the access log, 
see help docs", + default=sentinel, + ) + parser.add_argument( + "--backlog", help="The maximum number of pending connections", type=int, default=sentinel + ) + parser.add_argument( + "-b", + "--bind", + dest="binds", + help=""" The host/address to bind to. Should be either host:port, host, + unix:path or fd://num, e.g. 127.0.0.1:5000, 127.0.0.1, + unix:/tmp/socket or fd://33 respectively. """, + default=[], + action="append", + ) + parser.add_argument("--ca-certs", help="Path to the SSL CA certificate file", default=sentinel) + parser.add_argument("--certfile", help="Path to the SSL certificate file", default=sentinel) + parser.add_argument("--cert-reqs", help="See verify mode argument", type=int, default=sentinel) + parser.add_argument("--ciphers", help="Ciphers to use for the SSL setup", default=sentinel) + parser.add_argument( + "-c", + "--config", + help="Location of a TOML config file or when prefixed with `python:` a Python file.", + default=None, + ) + parser.add_argument( + "--debug", + help="Enable debug mode, i.e. extra logging and checks", + action="store_true", + default=sentinel, + ) + parser.add_argument( + "--error-log", + help="The target location for the error log, use `-` for stderr", + default=sentinel, + ) + parser.add_argument( + "-k", + "--worker-class", + dest="worker_class", + help="The type of worker to use. " + "Options include asyncio, uvloop (pip install hypercorn[uvloop]), " + "and trio (pip install hypercorn[trio]).", + default=sentinel, + ) + parser.add_argument( + "--keep-alive", + help="Seconds to keep inactive connections alive for", + default=sentinel, + type=int, + ) + parser.add_argument("--keyfile", help="Path to the SSL key file", default=sentinel) + parser.add_argument( + "-p", "--pid", help="Location to write the PID (Program ID) to.", default=sentinel + ) + parser.add_argument( + "--reload", + help="Enable automatic reloads on code changes", + action="store_true", + default=sentinel, + ) + parser.add_argument( + "--root-path", help="The setting for the ASGI root_path variable", default=sentinel + ) + parser.add_argument( + "--uvloop", + dest="uvloop", + help="Enable uvloop usage (Deprecated, use `--worker-class uvloop` instead)", + action="store_true", + default=sentinel, + ) + + def _convert_verify_mode(value: str) -> ssl.VerifyMode: # type: ignore + try: + return ssl.VerifyMode[value] # type: ignore + except KeyError: + raise argparse.ArgumentTypeError("Not a valid verify mode") + + parser.add_argument( + "--verify-mode", + help="SSL verify mode for peer's certificate, see ssl.VerifyMode enum for possible values.", + type=_convert_verify_mode, + default=sentinel, + ) + parser.add_argument( + "-w", + "--workers", + dest="workers", + help="The number of workers to spawn and use", + default=sentinel, + type=int, + ) + args = parser.parse_args(sys_args or sys.argv[1:]) + config = _load_config(args.config) + config.application_path = args.application + + if args.access_logformat is not sentinel: + config.access_log_format = args.access_logformat + if args.access_log is not sentinel: + config.access_log_target = args.access_log + if args.backlog is not sentinel: + config.backlog = args.backlog + if args.ca_certs is not sentinel: + config.ca_certs = args.ca_certs + if args.certfile is not sentinel: + config.certfile = args.certfile + if args.cert_reqs is not sentinel: + config.cert_reqs = args.cert_reqs + if args.ciphers is not sentinel: + config.ciphers = args.ciphers + if args.debug is not sentinel: + config.debug = args.debug + if args.error_log is 
not sentinel: + config.error_log_target = args.error_log + if args.keep_alive is not sentinel: + config.keep_alive_timeout = args.keep_alive + if args.keyfile is not sentinel: + config.keyfile = args.keyfile + if args.pid is not sentinel: + config.pid_path = args.pid + if args.root_path is not sentinel: + config.root_path = args.root_path + if args.reload is not sentinel: + config.use_reloader = args.reload + if args.uvloop is not sentinel: + warnings.warn( + "The uvloop argument is deprecated, use `--worker-class uvloop` instead", + DeprecationWarning, + ) + config.worker_class = "uvloop" + if args.worker_class is not sentinel: + config.worker_class = args.worker_class + if args.workers is not sentinel: + config.workers = args.workers + + scheme = "https" if config.ssl_enabled else "http" + if len(args.binds) > 0: + config.bind = args.binds + + for bind in config.bind: + print(f"Running on {bind} over {scheme} (CTRL + C to quit)") # noqa: T001 + + run(config) + + +if __name__ == "__main__": + main() diff --git a/py3/lib/python3.6/site-packages/hypercorn/asgi/__init__.py b/py3/lib/python3.6/site-packages/hypercorn/asgi/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/py3/lib/python3.6/site-packages/hypercorn/asgi/h11.py b/py3/lib/python3.6/site-packages/hypercorn/asgi/h11.py new file mode 100644 index 0000000..3479922 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/asgi/h11.py @@ -0,0 +1,152 @@ +import asyncio +from itertools import chain +from time import time +from typing import List, Optional, Tuple, Type +from urllib.parse import unquote + +import h11 + +from .run import H2CProtocolRequired, H2ProtocolAssumed, WebsocketProtocolRequired +from .utils import ASGIHTTPState, build_and_validate_headers, UnexpectedMessage +from ..config import Config +from ..typing import ASGIFramework, H11SendableEvent +from ..utils import suppress_body + + +class H11Mixin: + # This handles a h11 request in the ASGI system, all I/O + # (including when to close) should be handled by the actual worker + # rather than this class. + + app: Type[ASGIFramework] + client: Tuple[str, int] + config: Config + response: Optional[dict] + server: Tuple[str, int] + state: ASGIHTTPState + + @property + def scheme(self) -> str: + pass + + def response_headers(self) -> List[Tuple[bytes, bytes]]: + pass + + async def asend(self, event: H11SendableEvent) -> None: + pass + + async def asgi_put(self, message: dict) -> None: + """Called by the ASGI server to put a message to the ASGI instance. + + See asgi_receive as the get to this put. 
+ """ + pass + + async def asgi_receive(self) -> dict: + """Called by the ASGI instance to receive a message.""" + pass + + def error_response(self, status_code: int) -> h11.Response: + return h11.Response( + status_code=status_code, + headers=chain( + [(b"content-length", b"0"), (b"connection", b"close")], self.response_headers() + ), + ) + + def raise_if_upgrade(self, event: h11.Request, trailing_data: bytes) -> None: + upgrade_value = "" + connection_value = "" + has_body = False + for name, value in event.headers: + sanitised_name = name.decode().strip().lower() + if sanitised_name == "upgrade": + upgrade_value = value.decode().strip() + elif sanitised_name == "connection": + connection_value = value.decode().strip() + elif sanitised_name in {"content-length", "transfer-encoding"}: + has_body = True + + connection_tokens = connection_value.lower().split(",") + if ( + any(token.strip() == "upgrade" for token in connection_tokens) + and upgrade_value.lower() == "websocket" + and event.method.decode().upper() == "GET" + ): + raise WebsocketProtocolRequired(event) + # h2c Upgrade requests with a body are a pain as the body must + # be fully recieved in HTTP/1.1 before the upgrade response + # and HTTP/2 takes over, so Hypercorn ignores the upgrade and + # responds in HTTP/1.1. Use a preflight OPTIONS request to + # initiate the upgrade if really required (or just use h2). + elif upgrade_value.lower() == "h2c" and not has_body: + raise H2CProtocolRequired(event) + elif event.method == b"PRI" and event.target == b"*" and event.http_version == b"2.0": + raise H2ProtocolAssumed(b"PRI * HTTP/2.0\r\n\r\n" + trailing_data) + + async def handle_request(self, request: h11.Request) -> None: + path, _, query_string = request.target.partition(b"?") + self.scope = { + "type": "http", + "http_version": request.http_version.decode(), + "asgi": {"version": "2.0"}, + "method": request.method.decode().upper(), + "scheme": self.scheme, + "path": unquote(path.decode("ascii")), + "query_string": query_string, + "root_path": self.config.root_path, + "headers": request.headers, + "client": self.client, + "server": self.server, + } + await self.handle_asgi_app() + + async def handle_asgi_app(self) -> None: + start_time = time() + try: + asgi_instance = self.app(self.scope) + await asgi_instance(self.asgi_receive, self.asgi_send) + except asyncio.CancelledError: + pass + except Exception: + if self.config.error_logger is not None: + self.config.error_logger.exception("Error in ASGI Framework") + + # If the application hasn't sent a response, it has errored - + # send a 500 for it. 
+ if self.state == ASGIHTTPState.REQUEST: + await self.asend(self.error_response(500)) + await self.asend(h11.EndOfMessage()) + self.response = {"status": 500, "headers": []} + + self.config.access_logger.access(self.scope, self.response, time() - start_time) + + async def asgi_send(self, message: dict) -> None: + """Called by the ASGI instance to send a message.""" + if message["type"] == "http.response.start" and self.state == ASGIHTTPState.REQUEST: + self.response = message + elif message["type"] == "http.response.body" and self.state in { + ASGIHTTPState.REQUEST, + ASGIHTTPState.RESPONSE, + }: + if self.state == ASGIHTTPState.REQUEST: + headers = build_and_validate_headers(self.response["headers"]) + headers.extend(self.response_headers()) + await self.asend( + h11.Response(status_code=int(self.response["status"]), headers=headers) + ) + self.state = ASGIHTTPState.RESPONSE + + if ( + not suppress_body(self.scope["method"], int(self.response["status"])) + and message.get("body", b"") != b"" + ): + await self.asend(h11.Data(data=bytes(message["body"]))) + + if not message.get("more_body", False): + if self.state != ASGIHTTPState.CLOSED: + await self.asend(h11.EndOfMessage()) + await self.asgi_put({"type": "http.disconnect"}) + self.state = ASGIHTTPState.CLOSED + else: + raise UnexpectedMessage(self.state, message["type"]) diff --git a/py3/lib/python3.6/site-packages/hypercorn/asgi/h2.py b/py3/lib/python3.6/site-packages/hypercorn/asgi/h2.py new file mode 100644 index 0000000..da5d382 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/asgi/h2.py @@ -0,0 +1,315 @@ +import asyncio +from time import time +from typing import Callable, Iterable, List, Tuple, Type +from urllib.parse import unquote + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import wsproto.connection +import wsproto.events +import wsproto.extensions +import wsproto.frame_protocol +from wsproto.handshake import server_extensions_handshake +from wsproto.utilities import split_comma_header # Specifically to match wsproto expectations + +from .utils import ( + ASGIHTTPState, + ASGIWebsocketState, + build_and_validate_headers, + raise_if_subprotocol_present, + UnexpectedMessage, +) +from ..config import Config +from ..typing import ASGIFramework +from ..utils import suppress_body + + +class H2Event: + def __repr__(self) -> str: + kwarg_str = ", ".join( + f"{name}={value}" for name, value in self.__dict__.items() if name[0] != "_" + ) + return f"{self.__class__.__name__}({kwarg_str})" + + def __eq__(self, other: object) -> bool: + return self.__class__ == other.__class__ and self.__dict__ == other.__dict__ + + def __ne__(self, other: object) -> bool: + return not self.__eq__(other) + + # This is an unhashable type. 
+ __hash__ = None + + +class EndStream(H2Event): + pass + + +class Response(H2Event): + def __init__(self, headers: Iterable[Tuple[bytes, bytes]]) -> None: + super().__init__() + self.headers = headers + + +class Data(H2Event): + def __init__(self, data: bytes) -> None: + super().__init__() + self.data = data + + +class ServerPush(H2Event): + def __init__(self, path: str, headers: Iterable[Tuple[bytes, bytes]]) -> None: + super().__init__() + self.path = path + self.headers = headers + + +class H2HTTPStreamMixin: + app: Type[ASGIFramework] + asend: Callable + config: Config + response: dict + state: ASGIHTTPState + + async def asgi_receive(self) -> dict: + """Called by the ASGI instance to receive a message.""" + pass + + async def handle_request( + self, + event: h2.events.RequestReceived, + scheme: str, + client: Tuple[str, int], + server: Tuple[str, int], + ) -> None: + headers = [] + for name, value in event.headers: + if name == b":method": + method = value.decode("ascii").upper() + elif name == b":path": + raw_path = value + headers.append((name, value)) + path, _, query_string = raw_path.partition(b"?") + self.scope = { + "type": "http", + "http_version": "2", + "asgi": {"version": "2.0"}, + "method": method, + "scheme": scheme, + "path": unquote(path.decode("ascii")), + "query_string": query_string, + "root_path": self.config.root_path, + "headers": headers, + "client": client, + "server": server, + "extensions": {"http.response.push": {}}, + } + await self.handle_asgi_app() + + async def handle_asgi_app(self) -> None: + start_time = time() + try: + asgi_instance = self.app(self.scope) + await asgi_instance(self.asgi_receive, self.asgi_send) + except asyncio.CancelledError: + pass + except Exception: + if self.config.error_logger is not None: + self.config.error_logger.exception("Error in ASGI Framework") + + # If the application hasn't sent a response, it has errored - + # send a 500 for it. 
+ if self.state == ASGIHTTPState.REQUEST: + headers = [(b":status", b"500")] + await self.asend(Response(headers)) + await self.asend(EndStream()) + self.response = {"status": 500, "headers": []} + + self.config.access_logger.access(self.scope, self.response, time() - start_time) + + async def asgi_send(self, message: dict) -> None: + if message["type"] == "http.response.start" and self.state == ASGIHTTPState.REQUEST: + self.response = message + elif message["type"] == "http.response.push": + if not isinstance(message["path"], str): + raise TypeError(f"{message['path']} should be a str") + headers = build_and_validate_headers(message["headers"]) + await self.asend(ServerPush(message["path"], headers)) + elif message["type"] == "http.response.body" and self.state in { + ASGIHTTPState.REQUEST, + ASGIHTTPState.RESPONSE, + }: + if self.state == ASGIHTTPState.REQUEST: + headers = [(b":status", b"%d" % self.response["status"])] + headers.extend(build_and_validate_headers(self.response["headers"])) + await self.asend(Response(headers)) + self.state = ASGIHTTPState.RESPONSE + if ( + not suppress_body(self.scope["method"], self.response["status"]) + and message.get("body", b"") != b"" + ): + await self.asend(Data(bytes(message.get("body", b"")))) + if not message.get("more_body", False): + if self.state != ASGIHTTPState.CLOSED: + await self.asend(EndStream()) + else: + raise UnexpectedMessage(self.state, message["type"]) + + +class H2WebsocketStreamMixin: + app: Type[ASGIFramework] + asend: Callable + config: Config + connection: wsproto.connection.Connection + response: dict + state: ASGIWebsocketState + + async def asgi_put(self, message: dict) -> None: + """Called by the ASGI server to put a message to the ASGI instance. + + See asgi_receive as the get to this put. 
+ """ + pass + + async def asgi_receive(self) -> dict: + """Called by the ASGI instance to receive a message.""" + pass + + async def handle_request( + self, + event: h2.events.RequestReceived, + scheme: str, + client: Tuple[str, int], + server: Tuple[str, int], + ) -> None: + headers = [] + for name, value in event.headers: + if name == b":path": + raw_path = value + headers.append((name, value)) + path, _, query_string = raw_path.partition(b"?") + self.scope = { + "type": "websocket", + "asgi": {"version": "2.0"}, + # RFC 8441 (HTTP/2) Says use http or https, ASGI says ws or wss + "scheme": "wss" if scheme == "https" else "ws", + "http_version": "2", + "path": unquote(path.decode("ascii")), + "query_string": query_string, + "root_path": self.config.root_path, + "headers": headers, + "client": client, + "server": server, + "subprotocols": [], + "extensions": {"websocket.http.response": {}}, + } + self.state = ASGIWebsocketState.HANDSHAKE + await self.handle_asgi_app() + + async def send_http_error(self, status: int) -> None: + headers = [(b":status", b"%d" % status)] + await self.asend(Response(headers=headers)) + await self.asend(EndStream()) + self.config.access_logger.access( + self.scope, {"status": status, "headers": []}, time() - self.start_time + ) + + async def handle_asgi_app(self) -> None: + self.start_time = time() + await self.asgi_put({"type": "websocket.connect"}) + try: + asgi_instance = self.app(self.scope) + await asgi_instance(self.asgi_receive, self.asgi_send) + except asyncio.CancelledError: + pass + except Exception: + if self.config.error_logger is not None: + self.config.error_logger.exception("Error in ASGI Framework") + + if self.state == ASGIWebsocketState.CONNECTED: + await self.asend( + Data( + self.connection.send( + wsproto.events.CloseConnection( + code=wsproto.frame_protocol.CloseReason.ABNORMAL_CLOSURE + ) + ) + ) + ) + self.state = ASGIWebsocketState.CLOSED + + # If the application hasn't accepted the connection (or sent a + # response) send a 500 for it. Otherwise if the connection + # hasn't been closed then close it. 
+ if self.state == ASGIWebsocketState.HANDSHAKE: + await self.send_http_error(500) + self.state = ASGIWebsocketState.HTTPCLOSED + + async def asgi_send(self, message: dict) -> None: + if message["type"] == "websocket.accept" and self.state == ASGIWebsocketState.HANDSHAKE: + self.state = ASGIWebsocketState.CONNECTED + extensions: List[str] = [] + for name, value in self.scope["headers"]: + if name == b"sec-websocket-extensions": + extensions = split_comma_header(value) + supported_extensions = [wsproto.extensions.PerMessageDeflate()] + accepts = server_extensions_handshake(extensions, supported_extensions) + headers = [(b":status", b"200")] + headers.extend(build_and_validate_headers(message.get("headers", []))) + raise_if_subprotocol_present(headers) + if message.get("subprotocol") is not None: + headers.append((b"sec-websocket-protocol", message["subprotocol"].encode())) + if accepts: + headers.append((b"sec-websocket-extensions", accepts)) + await self.asend(Response(headers)) + self.connection = wsproto.connection.Connection( + wsproto.connection.ConnectionType.SERVER, supported_extensions + ) + self.config.access_logger.access( + self.scope, {"status": 200, "headers": []}, time() - self.start_time + ) + elif ( + message["type"] == "websocket.http.response.start" + and self.state == ASGIWebsocketState.HANDSHAKE + ): + self.response = message + self.config.access_logger.access(self.scope, self.response, time() - self.start_time) + elif message["type"] == "websocket.http.response.body" and self.state in { + ASGIWebsocketState.HANDSHAKE, + ASGIWebsocketState.RESPONSE, + }: + await self._asgi_send_rejection(message) + elif message["type"] == "websocket.send" and self.state == ASGIWebsocketState.CONNECTED: + event: wsproto.events.Event + if message.get("bytes") is not None: + event = wsproto.events.BytesMessage(data=bytes(message["bytes"])) + elif not isinstance(message["text"], str): + raise TypeError(f"{message['text']} should be a str") + else: + event = wsproto.events.TextMessage(data=message["text"]) + await self.asend(Data(self.connection.send(event))) + elif message["type"] == "websocket.close" and self.state == ASGIWebsocketState.HANDSHAKE: + await self.send_http_error(403) + self.state = ASGIWebsocketState.HTTPCLOSED + elif message["type"] == "websocket.close": + data = self.connection.send(wsproto.events.CloseConnection(code=int(message["code"]))) + await self.asend(Data(data)) + self.state = ASGIWebsocketState.CLOSED + else: + raise UnexpectedMessage(self.state, message["type"]) + + async def _asgi_send_rejection(self, message: dict) -> None: + body_suppressed = suppress_body("GET", self.response["status"]) + if self.state == ASGIWebsocketState.HANDSHAKE: + headers = [(b":status", b"%d" % self.response["status"])] + headers.extend(build_and_validate_headers(self.response["headers"])) + await self.asend(Response(headers)) + self.state = ASGIWebsocketState.RESPONSE + if not body_suppressed and message.get("body", b"") != b"": + await self.asend(Data(bytes(message.get("body", b"")))) + if not message.get("more_body", False): + await self.asgi_put({"type": "websocket.disconnect"}) + await self.asend(EndStream()) + self.state = ASGIWebsocketState.HTTPCLOSED diff --git a/py3/lib/python3.6/site-packages/hypercorn/asgi/run.py b/py3/lib/python3.6/site-packages/hypercorn/asgi/run.py new file mode 100644 index 0000000..a1e1dc1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/asgi/run.py @@ -0,0 +1,19 @@ +import h11 + + +class WrongProtocolError(Exception): + def __init__(self, 
request: h11.Request) -> None: + self.request = request + + +class WebsocketProtocolRequired(WrongProtocolError): + pass + + +class H2CProtocolRequired(WrongProtocolError): + pass + + +class H2ProtocolAssumed(WrongProtocolError): + def __init__(self, data: bytes) -> None: + self.data = data diff --git a/py3/lib/python3.6/site-packages/hypercorn/asgi/utils.py b/py3/lib/python3.6/site-packages/hypercorn/asgi/utils.py new file mode 100644 index 0000000..b6cd697 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/asgi/utils.py @@ -0,0 +1,70 @@ +from enum import auto, Enum +from typing import List, Optional, Tuple, Union + +from wsproto.events import Message, TextMessage + + +class ASGIHTTPState(Enum): + # The ASGI Spec is clear that a response should not start till the + # framework has sent at least one body message hence why this + # state tracking is required. + REQUEST = auto() + RESPONSE = auto() + CLOSED = auto() + + +class ASGIWebsocketState(Enum): + # Hypercorn supports the ASGI websocket HTTP response extension, + # which allows HTTP responses rather than acceptance. + HANDSHAKE = auto() + CONNECTED = auto() + RESPONSE = auto() + CLOSED = auto() + HTTPCLOSED = auto() + + +class UnexpectedMessage(Exception): + def __init__(self, state: Enum, message_type: str) -> None: + super().__init__(f"Unexpected message type, {message_type} given the state {state}") + + +class FrameTooLarge(Exception): + pass + + +class WebsocketBuffer: + def __init__(self, max_length: int) -> None: + self.value: Optional[Union[bytes, str]] = None + self.max_length = max_length + + def extend(self, event: Message) -> None: + if self.value is None: + if isinstance(event, TextMessage): + self.value = "" + else: + self.value = b"" + self.value += event.data # type: ignore + if len(self.value) > self.max_length: + raise FrameTooLarge() + + def clear(self) -> None: + self.value = None + + def to_message(self) -> dict: + return { + "type": "websocket.receive", + "bytes": self.value if isinstance(self.value, bytes) else None, + "text": self.value if isinstance(self.value, str) else None, + } + + +def build_and_validate_headers(headers: List[Tuple[bytes, bytes]]) -> List[Tuple[bytes, bytes]]: + # Validates that the header name and value are bytes + return [(bytes(name).lower().strip(), bytes(value).strip()) for name, value in headers] + + +def raise_if_subprotocol_present(headers: List[Tuple[bytes, bytes]]) -> None: + # The headers should be built and validated first. 
+ for name, value in headers: + if name == b"sec-websocket-protocol": + raise Exception("Invalid header, use the subprotocol option instead") diff --git a/py3/lib/python3.6/site-packages/hypercorn/asgi/wsproto.py b/py3/lib/python3.6/site-packages/hypercorn/asgi/wsproto.py new file mode 100644 index 0000000..20f23ae --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/asgi/wsproto.py @@ -0,0 +1,176 @@ +import asyncio +from time import time +from typing import List, Optional, Tuple, Type, Union +from urllib.parse import unquote + +from wsproto.events import ( + AcceptConnection, + BytesMessage, + CloseConnection, + Event, + RejectConnection, + RejectData, + Request, + TextMessage, +) +from wsproto.extensions import PerMessageDeflate +from wsproto.frame_protocol import CloseReason + +from .utils import ( + ASGIWebsocketState, + build_and_validate_headers, + raise_if_subprotocol_present, + UnexpectedMessage, +) +from ..config import Config +from ..typing import ASGIFramework +from ..utils import suppress_body + + +class WebsocketMixin: + app: Type[ASGIFramework] + client: Tuple[str, int] + config: Config + response: Optional[dict] + server: Tuple[str, int] + start_time: float + state: ASGIWebsocketState + + @property + def scheme(self) -> str: + pass + + def response_headers(self) -> List[Tuple[bytes, bytes]]: + pass + + async def asend(self, event: Event) -> None: + pass + + async def asgi_put(self, message: dict) -> None: + """Called by the ASGI server to put a message to the ASGI instance. + + See asgi_receive as the get to this put. + """ + pass + + async def asgi_receive(self) -> dict: + """Called by the ASGI instance to receive a message.""" + pass + + async def handle_websocket(self, event: Request) -> None: + path, _, query_string = event.target.partition("?") + headers = [(b"host", event.host.encode())] + headers.extend(event.extra_headers) + self.scope = { + "type": "websocket", + "asgi": {"version": "2.0"}, + "http_version": "1.1", + "scheme": self.scheme, + "path": unquote(path), + "query_string": query_string.encode("ascii"), + "root_path": self.config.root_path, + "headers": headers, + "client": self.client, + "server": self.server, + "subprotocols": event.subprotocols, + "extensions": {"websocket.http.response": {}}, + } + await self.handle_asgi_app(event) + + async def send_http_error(self, status: int) -> None: + await self.asend(RejectConnection(status_code=status, headers=self.response_headers())) + self.config.access_logger.access( + self.scope, {"status": status, "headers": []}, time() - self.start_time + ) + + async def handle_asgi_app(self, event: Request) -> None: + self.start_time = time() + await self.asgi_put({"type": "websocket.connect"}) + try: + asgi_instance = self.app(self.scope) + await asgi_instance(self.asgi_receive, self.asgi_send) + except asyncio.CancelledError: + pass + except Exception: + if self.config.error_logger is not None: + self.config.error_logger.exception("Error in ASGI Framework") + + if self.state == ASGIWebsocketState.CONNECTED: + await self.asend(CloseConnection(code=CloseReason.ABNORMAL_CLOSURE)) + self.state = ASGIWebsocketState.CLOSED + + # If the application hasn't accepted the connection (or sent a + # response) send a 500 for it. Otherwise if the connection + # hasn't been closed then close it. 
+ if self.state == ASGIWebsocketState.HANDSHAKE: + await self.send_http_error(500) + self.state = ASGIWebsocketState.HTTPCLOSED + + async def asgi_send(self, message: dict) -> None: + """Called by the ASGI instance to send a message.""" + if message["type"] == "websocket.accept" and self.state == ASGIWebsocketState.HANDSHAKE: + headers = build_and_validate_headers(message.get("headers", [])) + raise_if_subprotocol_present(headers) + headers.extend(self.response_headers()) + await self.asend( + AcceptConnection( + extensions=[PerMessageDeflate()], + extra_headers=headers, + subprotocol=message.get("subprotocol"), + ) + ) + self.state = ASGIWebsocketState.CONNECTED + self.config.access_logger.access( + self.scope, {"status": 101, "headers": []}, time() - self.start_time + ) + elif ( + message["type"] == "websocket.http.response.start" + and self.state == ASGIWebsocketState.HANDSHAKE + ): + self.response = message + self.config.access_logger.access(self.scope, self.response, time() - self.start_time) + elif message["type"] == "websocket.http.response.body" and self.state in { + ASGIWebsocketState.HANDSHAKE, + ASGIWebsocketState.RESPONSE, + }: + await self._asgi_send_rejection(message) + elif message["type"] == "websocket.send" and self.state == ASGIWebsocketState.CONNECTED: + data: Union[bytes, str] + if message.get("bytes") is not None: + await self.asend(BytesMessage(data=bytes(message["bytes"]))) + elif not isinstance(message["text"], str): + raise TypeError(f"{message['text']} should be a str") + else: + await self.asend(TextMessage(data=message["text"])) + elif message["type"] == "websocket.close" and self.state == ASGIWebsocketState.HANDSHAKE: + await self.send_http_error(403) + self.state = ASGIWebsocketState.HTTPCLOSED + elif message["type"] == "websocket.close": + await self.asend(CloseConnection(code=int(message["code"]))) + self.state = ASGIWebsocketState.CLOSED + else: + raise UnexpectedMessage(self.state, message["type"]) + + async def _asgi_send_rejection(self, message: dict) -> None: + body_suppressed = suppress_body("GET", self.response["status"]) + if self.state == ASGIWebsocketState.HANDSHAKE: + headers = build_and_validate_headers(self.response["headers"]) + headers.extend(self.response_headers()) + await self.asend( + RejectConnection( + status_code=int(self.response["status"]), + headers=headers, + has_body=not body_suppressed, + ) + ) + self.state = ASGIWebsocketState.RESPONSE + if not body_suppressed: + await self.asend( + RejectData( + data=bytes(message.get("body", b"")), + body_finished=not message.get("more_body", False), + ) + ) + if not message.get("more_body", False): + await self.asgi_put({"type": "websocket.disconnect"}) + self.state = ASGIWebsocketState.HTTPCLOSED diff --git a/py3/lib/python3.6/site-packages/hypercorn/asyncio/__init__.py b/py3/lib/python3.6/site-packages/hypercorn/asyncio/__init__.py new file mode 100644 index 0000000..94fc46b --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/asyncio/__init__.py @@ -0,0 +1,31 @@ +import warnings +from typing import Type + +from .run import worker_serve +from ..config import Config +from ..typing import ASGIFramework + + +async def serve(app: Type[ASGIFramework], config: Config) -> None: + """Serve an ASGI framework app given the config. + + This allows for a programmatic way to serve an ASGI framework, it + can be used via, + + .. 
code-block:: python + + asyncio.run(serve(app, config)) + + It is assumed that the event-loop is configured before calling + this function, therefore configuration values that relate to loop + setup or process setup are ignored. + + """ + if config.debug: + warnings.warn("The config `debug` has no affect when using serve", Warning) + if config.workers != 1: + warnings.warn("The config `workers` has no affect when using serve", Warning) + if config.worker_class != "asyncio": + warnings.warn("The config `worker_class` has no affect when using serve", Warning) + + await worker_serve(app, config) diff --git a/py3/lib/python3.6/site-packages/hypercorn/asyncio/base.py b/py3/lib/python3.6/site-packages/hypercorn/asyncio/base.py new file mode 100644 index 0000000..d60056a --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/asyncio/base.py @@ -0,0 +1,82 @@ +import asyncio +from typing import List, Optional, Tuple + +from ..config import Config +from ..utils import parse_socket_addr, response_headers + + +class HTTPServer: + def __init__( + self, + loop: asyncio.AbstractEventLoop, + config: Config, + transport: asyncio.BaseTransport, + protocol: str, + ) -> None: + self.loop = loop + self.config = config + self.transport = transport + self.protocol = protocol + + self.closed = False + self._can_write = asyncio.Event(loop=loop) + self._can_write.set() + self.start_keep_alive_timeout() + + socket = self.transport.get_extra_info("socket") + self.client = parse_socket_addr(socket.family, socket.getpeername()) + self.server = parse_socket_addr(socket.family, socket.getsockname()) + self.ssl_info = self.transport.get_extra_info("ssl_object") + + def data_received(self, data: bytes) -> None: + # Called whenever data is received. + pass + + def eof_received(self) -> bool: + # Either received once or not at all, if the client signals + # the connection is closed from their side. Is not called for + # SSL connections. If it returns Falsey the connection is + # closed from our side. + return True + + def pause_writing(self) -> None: + # Will be called whenever the transport crosses the high-water + # mark. + self._can_write.clear() + + def resume_writing(self) -> None: + # Will be called whenever the transport drops back below the + # low-water mark. + self._can_write.set() + + def connection_lost(self, error: Optional[Exception]) -> None: + # This is called once when the connection is closed from our + # side with an argument of None, otherwise something else + # errored. 
+ pass + + async def drain(self) -> None: + await self._can_write.wait() + + def write(self, data: bytes) -> None: + self.transport.write(data) # type: ignore + + def close(self) -> None: + self.transport.close() + self.resume_writing() + self.closed = True + self.stop_keep_alive_timeout() + + def response_headers(self) -> List[Tuple[bytes, bytes]]: + return response_headers(self.protocol) + + def start_keep_alive_timeout(self) -> None: + self._keep_alive_timeout_handle = self.loop.call_later( + self.config.keep_alive_timeout, self.handle_timeout + ) + + def stop_keep_alive_timeout(self) -> None: + self._keep_alive_timeout_handle.cancel() + + def handle_timeout(self) -> None: + self.close() diff --git a/py3/lib/python3.6/site-packages/hypercorn/asyncio/h11.py b/py3/lib/python3.6/site-packages/hypercorn/asyncio/h11.py new file mode 100644 index 0000000..21aacc2 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/asyncio/h11.py @@ -0,0 +1,123 @@ +import asyncio +from typing import Optional, Type + +import h11 + +from .base import HTTPServer +from ..asgi.h11 import H11Mixin +from ..asgi.run import H2CProtocolRequired +from ..asgi.utils import ASGIHTTPState +from ..config import Config +from ..typing import ASGIFramework, H11SendableEvent + + +class H11Server(HTTPServer, H11Mixin): + def __init__( + self, + app: Type[ASGIFramework], + loop: asyncio.AbstractEventLoop, + config: Config, + transport: asyncio.BaseTransport, + ) -> None: + super().__init__(loop, config, transport, "h11") + self.app = app + self.connection = h11.Connection( + h11.SERVER, max_incomplete_event_size=self.config.h11_max_incomplete_size + ) + + self.app_queue: asyncio.Queue = asyncio.Queue(loop=loop) + self.response: Optional[dict] = None + self.scope: Optional[dict] = None + self.state = ASGIHTTPState.REQUEST + self.task: Optional[asyncio.Future] = None + + def connection_lost(self, error: Optional[Exception]) -> None: + if error is not None: + self.app_queue.put_nowait({"type": "http.disconnect"}) + self.connection.send_failed() # Set our state to error, prevents recycling + + def eof_received(self) -> bool: + self.data_received(b"") + return True + + def data_received(self, data: bytes) -> None: + self.connection.receive_data(data) + self.handle_events() + + def handle_events(self) -> None: + # Called on receipt of data or after recycling the connection + while True: + if self.connection.they_are_waiting_for_100_continue: + self.send( + h11.InformationalResponse(status_code=100, headers=self.response_headers()) + ) + try: + event = self.connection.next_event() + except h11.RemoteProtocolError: + self.send(self.error_response(400)) + self.send(h11.EndOfMessage()) + self.app_queue.put_nowait({"type": "http.disconnect"}) + self.close() + break + else: + if isinstance(event, h11.Request): + self.stop_keep_alive_timeout() + try: + self.raise_if_upgrade(event, self.connection.trailing_data[0]) + except H2CProtocolRequired as error: + self.send( + h11.InformationalResponse( + status_code=101, + headers=[(b"upgrade", b"h2c")] + self.response_headers(), + ) + ) + raise error + self.task = self.loop.create_task(self.handle_request(event)) + self.task.add_done_callback(self.recycle_or_close) + elif isinstance(event, h11.EndOfMessage): + self.app_queue.put_nowait( + {"type": "http.request", "body": b"", "more_body": False} + ) + elif isinstance(event, h11.Data): + self.app_queue.put_nowait( + {"type": "http.request", "body": event.data, "more_body": True} + ) + elif ( + isinstance(event, h11.ConnectionClosed) + or event 
is h11.NEED_DATA + or event is h11.PAUSED + ): + break + + def send(self, event: H11SendableEvent) -> None: + try: + self.write(self.connection.send(event)) # type: ignore + except h11.LocalProtocolError: + pass + + async def asend(self, event: H11SendableEvent) -> None: + self.send(event) + await self.drain() + + def recycle_or_close(self, future: asyncio.Future) -> None: + if self.connection.our_state is h11.DONE: + self.connection.start_next_cycle() + self.app_queue = asyncio.Queue(loop=self.loop) + self.response = None + self.scope = None + self.state = ASGIHTTPState.REQUEST + self.start_keep_alive_timeout() + self.handle_events() + else: # Either reached a good close state, or has errored + self.close() + + async def asgi_put(self, message: dict) -> None: + await self.app_queue.put(message) + + async def asgi_receive(self) -> dict: + """Called by the ASGI instance to receive a message.""" + return await self.app_queue.get() + + @property + def scheme(self) -> str: + return "https" if self.ssl_info is not None else "http" diff --git a/py3/lib/python3.6/site-packages/hypercorn/asyncio/h2.py b/py3/lib/python3.6/site-packages/hypercorn/asyncio/h2.py new file mode 100644 index 0000000..738955c --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/asyncio/h2.py @@ -0,0 +1,355 @@ +import asyncio +from functools import partial +from itertools import chain +from typing import Callable, Dict, Iterable, List, Optional, Tuple, Type + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h11 +import wsproto +import wsproto.connection +import wsproto.events + +from .base import HTTPServer +from ..asgi.h2 import ( + Data, + EndStream, + H2Event, + H2HTTPStreamMixin, + H2WebsocketStreamMixin, + Response, + ServerPush, +) +from ..asgi.utils import ASGIHTTPState, ASGIWebsocketState +from ..config import Config +from ..typing import ASGIFramework, H2SyncStream + + +class H2HTTPStream(H2HTTPStreamMixin): + """A HTTP Stream.""" + + def __init__(self, app: Type[ASGIFramework], config: Config, asend: Callable) -> None: + self.app = app + self.config = config + self.response: Optional[dict] = None + self.scope: Optional[dict] = None + self.state = ASGIHTTPState.REQUEST + + self.asend = asend # type: ignore + self.to_app: asyncio.Queue = asyncio.Queue() + + def data_received(self, data: bytes) -> None: + self.to_app.put_nowait({"type": "http.request", "body": data, "more_body": True}) + + def ended(self) -> None: + self.to_app.put_nowait({"type": "http.request", "body": b"", "more_body": False}) + + def reset(self) -> None: + self.to_app.put_nowait({"type": "http.disconnect"}) + + def close(self) -> None: + self.to_app.put_nowait({"type": "http.disconnect"}) + + async def asgi_receive(self) -> dict: + return await self.to_app.get() + + +class H2WebsocketStream(H2WebsocketStreamMixin): + """A Websocket Stream.""" + + def __init__( + self, app: Type[ASGIFramework], config: Config, asend: Callable, send: Callable + ) -> None: + self.app = app + self.config = config + self.response: Optional[dict] = None + self.scope: Optional[dict] = None + self.state = ASGIWebsocketState.CONNECTED + self.connection: Optional[wsproto.connection.Connection] = None + + self.asend = asend # type: ignore + self.send = send + self.to_app: asyncio.Queue = asyncio.Queue() + + def data_received(self, data: bytes) -> None: + self.connection.receive_data(data) + for event in self.connection.events(): + if isinstance(event, wsproto.events.TextMessage): + self.to_app.put_nowait({"type": 
"websocket.receive", "text": event.data}) + elif isinstance(event, wsproto.events.BytesMessage): + self.to_app.put_nowait({"type": "websocket.receive", "bytes": event.data}) + elif isinstance(event, wsproto.events.Ping): + self.send(Data(self.connection.send(event.response()))) + elif isinstance(event, wsproto.events.CloseConnection): + if self.connection.state == wsproto.connection.ConnectionState.REMOTE_CLOSING: + self.send(Data(self.connection.send(event.response()))) + self.to_app.put_nowait({"type": "websocket.disconnect"}) + break + + def ended(self) -> None: + self.to_app.put_nowait({"type": "websocket.disconnect"}) + + def reset(self) -> None: + self.to_app.put_nowait({"type": "websocket.disconnect"}) + + def close(self) -> None: + self.to_app.put_nowait({"type": "websocket.disconnect"}) + + async def asgi_put(self, message: dict) -> None: + await self.to_app.put(message) + + async def asgi_receive(self) -> dict: + return await self.to_app.get() + + +class H2Server(HTTPServer): + def __init__( + self, + app: Type[ASGIFramework], + loop: asyncio.AbstractEventLoop, + config: Config, + transport: asyncio.BaseTransport, + *, + upgrade_request: Optional[h11.Request] = None, + received_data: Optional[bytes] = None, + ) -> None: + super().__init__(loop, config, transport, "h2") + self.app = app + self.streams: Dict[int, H2SyncStream] = {} # type: ignore + self.flow_control: Dict[int, asyncio.Event] = {} + + self.connection = h2.connection.H2Connection( + config=h2.config.H2Configuration(client_side=False, header_encoding=None) + ) + self.connection.DEFAULT_MAX_INBOUND_FRAME_SIZE = config.h2_max_inbound_frame_size + self.connection.local_settings = h2.settings.Settings( + client=False, + initial_values={ + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: config.h2_max_concurrent_streams, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: config.h2_max_header_list_size, + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL: 1, + }, + ) + + if upgrade_request is None: + self.connection.initiate_connection() + if received_data: + self.data_received(received_data) + else: + settings = "" + headers = [] + for name, value in upgrade_request.headers: + if name.lower() == b"http2-settings": + settings = value.decode() + elif name.lower() == b"host": + headers.append((b":authority", value)) + headers.append((name, value)) + headers.append((b":method", upgrade_request.method)) + headers.append((b":path", upgrade_request.target)) + self.connection.initiate_upgrade_connection(settings) + event = h2.events.RequestReceived() + event.stream_id = 1 + event.headers = headers + self.create_stream(event, complete=True) + self.flush() + + def connection_lost(self, error: Optional[Exception]) -> None: + if error is not None: + self.close() + + def eof_received(self) -> bool: + self.data_received(b"") + return True + + def data_received(self, data: bytes) -> None: + try: + events = self.connection.receive_data(data) + except h2.exceptions.ProtocolError: + self.flush() + self.close() + else: + self.handle_events(events) + + def close(self) -> None: + for stream in self.streams.values(): + stream.close() + super().close() + + def create_stream(self, event: h2.events.RequestReceived, *, complete: bool = False) -> None: + method: str + for name, value in event.headers: + if name == b":method": + method = value.decode("ascii").upper() + if method == "CONNECT": + self.streams[event.stream_id] = H2WebsocketStream( + self.app, + self.config, + partial(self.asend, event.stream_id), + partial(self.send, event.stream_id), + ) 
+ else: + self.streams[event.stream_id] = H2HTTPStream( + self.app, self.config, partial(self.asend, event.stream_id) + ) + if complete: + self.streams[event.stream_id].ended() + self.loop.create_task(self.handle_request(event)) + + async def handle_request(self, event: h2.events.RequestReceived) -> None: + await self.streams[event.stream_id].handle_request( + event, self.scheme, self.client, self.server + ) + if ( + self.connection.state_machine.state is not h2.connection.ConnectionState.CLOSED + and event.stream_id in self.connection.streams + and not self.connection.streams[event.stream_id].closed + ): + # The connection is not closed and there has been an error + # preventing the stream from closing correctly. + self.connection.reset_stream(event.stream_id) + self.streams[event.stream_id].close() + del self.streams[event.stream_id] + if len(self.streams) == 0: + self.start_keep_alive_timeout() + + def handle_events(self, events: List[h2.events.Event]) -> None: + for event in events: + if isinstance(event, h2.events.RequestReceived): + self.stop_keep_alive_timeout() + self.create_stream(event) + elif isinstance(event, h2.events.DataReceived): + self.streams[event.stream_id].data_received(event.data) + self.connection.acknowledge_received_data( + event.flow_controlled_length, event.stream_id + ) + elif isinstance(event, h2.events.StreamReset): + self.streams[event.stream_id].reset() + elif isinstance(event, h2.events.StreamEnded): + self.streams[event.stream_id].ended() + elif isinstance(event, h2.events.WindowUpdated): + self.window_updated(event.stream_id) + elif isinstance(event, h2.events.ConnectionTerminated): + self.close() + return + self.flush() + + def flush(self) -> None: + data = self.connection.data_to_send() + if data != b"": + self.write(data) + + def send(self, stream_id: int, event: H2Event) -> None: + # Solely for websocket stream WSPing and WSClose data to be + # sent, need to find a better way. 
+ connection_state = self.connection.state_machine.state + stream_state = self.connection.streams[stream_id].state_machine.state + if ( + connection_state == h2.connection.ConnectionState.CLOSED + or stream_state == h2.stream.StreamState.CLOSED + ): + return + if isinstance(event, Data): + self.connection.send_data(stream_id, event.data) + self.flush() + + async def asend(self, stream_id: int, event: H2Event) -> None: + connection_state = self.connection.state_machine.state + stream_state = self.connection.streams[stream_id].state_machine.state + if ( + connection_state == h2.connection.ConnectionState.CLOSED + or stream_state == h2.stream.StreamState.CLOSED + ): + return + if isinstance(event, Response): + self.connection.send_headers( + stream_id, event.headers + self.response_headers() # type: ignore + ) + self.flush() + elif isinstance(event, EndStream): + self.connection.end_stream(stream_id) + self.flush() + elif isinstance(event, Data): + await self.send_data(stream_id, event.data) + elif isinstance(event, ServerPush): + self.server_push(stream_id, event.path, event.headers) + + async def send_data(self, stream_id: int, data: bytes) -> None: + while True: + while not self.connection.local_flow_control_window(stream_id): + await self.wait_for_flow_control(stream_id) + + chunk_size = min(len(data), self.connection.local_flow_control_window(stream_id)) + chunk_size = min(chunk_size, self.connection.max_outbound_frame_size) + if chunk_size < 1: # Pathological client sending negative window sizes + continue + self.connection.send_data(stream_id, data[:chunk_size]) + await self.drain() + self.flush() + data = data[chunk_size:] + if not data: + break + + async def wait_for_flow_control(self, stream_id: int) -> None: + event = asyncio.Event() + self.flow_control[stream_id] = event + await event.wait() + + def window_updated(self, stream_id: Optional[int]) -> None: + if stream_id is None or stream_id == 0: + # Unblock all streams + stream_ids = list(self.flow_control.keys()) + for stream_id in stream_ids: + event = self.flow_control.pop(stream_id) + event.set() + elif stream_id is not None: + if stream_id in self.flow_control: + event = self.flow_control.pop(stream_id) + event.set() + + def server_push( + self, stream_id: int, path: str, headers: Iterable[Tuple[bytes, bytes]] + ) -> None: + push_stream_id = self.connection.get_next_available_stream_id() + stream = self.streams[stream_id] + for name, value in stream.scope["headers"]: + if name == b":authority": + authority = value + request_headers = [ + (name, value) + for name, value in chain( + [ + (b":method", b"GET"), + (b":path", path.encode()), + (b":scheme", stream.scope["scheme"].encode()), + (b":authority", authority), + ], + headers, + self.response_headers(), + ) + ] + try: + self.connection.push_stream( + stream_id=stream_id, + promised_stream_id=push_stream_id, + request_headers=request_headers, + ) + except h2.exceptions.ProtocolError: + # Client does not accept push promises or we are trying to + # push on a push promises request. 
+ pass + else: + event = h2.events.RequestReceived() + event.stream_id = push_stream_id + event.headers = request_headers + self.create_stream(event, complete=True) + + @property + def scheme(self) -> str: + return "https" if self.ssl_info is not None else "http" + + def handle_timeout(self) -> None: + self.connection.close_connection() + self.flush() + super().handle_timeout() diff --git a/py3/lib/python3.6/site-packages/hypercorn/asyncio/lifespan.py b/py3/lib/python3.6/site-packages/hypercorn/asyncio/lifespan.py new file mode 100644 index 0000000..3f53def --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/asyncio/lifespan.py @@ -0,0 +1,72 @@ +import asyncio +from typing import Type + +from ..config import Config +from ..typing import ASGIFramework + + +class UnexpectedMessage(Exception): + pass + + +class Lifespan: + def __init__(self, app: Type[ASGIFramework], config: Config) -> None: + self.app = app + self.config = config + self.startup = asyncio.Event() + self.shutdown = asyncio.Event() + self.app_queue: asyncio.Queue = asyncio.Queue() + self.supported = True + self._support_checked = asyncio.Event() + + async def handle_lifespan(self) -> None: + scope = {"type": "lifespan"} + try: + asgi_instance = self.app(scope) + except Exception: + self._support_checked.set() + self.supported = False + if self.config.error_logger is not None: + self.config.error_logger.warning( + "ASGI Framework Lifespan error, continuing without Lifespan support" + ) + else: + self._support_checked.set() + try: + await asgi_instance(self.asgi_receive, self.asgi_send) + except asyncio.CancelledError: + pass + except Exception: + if self.config.error_logger is not None: + self.config.error_logger.exception("Error in ASGI Framework") + + async def wait_for_startup(self) -> None: + await self._support_checked.wait() + if not self.supported: + if hasattr(self.app, "startup"): # Compatibility with Quart 0.6.X + await self.app.startup() # type: ignore + return + + await self.app_queue.put({"type": "lifespan.startup"}) + await asyncio.wait_for(self.startup.wait(), timeout=self.config.startup_timeout) + + async def wait_for_shutdown(self) -> None: + await self._support_checked.wait() + if not self.supported: + if hasattr(self.app, "cleanup"): # Compatibility with Quart 0.6.X + await self.app.cleanup() # type: ignore + return + + await self.app_queue.put({"type": "lifespan.shutdown"}) + await asyncio.wait_for(self.shutdown.wait(), timeout=self.config.shutdown_timeout) + + async def asgi_receive(self) -> dict: + return await self.app_queue.get() + + async def asgi_send(self, message: dict) -> None: + if message["type"] == "lifespan.startup.complete": + self.startup.set() + elif message["type"] == "lifespan.shutdown.complete": + self.shutdown.set() + else: + raise UnexpectedMessage(message["type"]) diff --git a/py3/lib/python3.6/site-packages/hypercorn/asyncio/run.py b/py3/lib/python3.6/site-packages/hypercorn/asyncio/run.py new file mode 100644 index 0000000..50d86b9 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/asyncio/run.py @@ -0,0 +1,279 @@ +import asyncio +import platform +import signal +import ssl +import warnings +from multiprocessing.synchronize import Event as EventType +from socket import socket +from typing import Any, Coroutine, List, Optional, Type + +from .base import HTTPServer +from .h2 import H2Server +from .h11 import H11Server +from .lifespan import Lifespan +from .wsproto import WebsocketServer +from ..asgi.run import H2CProtocolRequired, H2ProtocolAssumed, 
WebsocketProtocolRequired +from ..config import Config +from ..typing import ASGIFramework +from ..utils import ( + check_shutdown, + load_application, + MustReloadException, + observe_changes, + restart, + Shutdown, +) + +try: + from socket import AF_UNIX +except ImportError: + AF_UNIX = None + + +def _raise_shutdown(*args: Any) -> None: + raise Shutdown() + + +class Server(asyncio.Protocol): + def __init__( + self, app: Type[ASGIFramework], loop: asyncio.AbstractEventLoop, config: Config + ) -> None: + self.app = app + self.loop = loop + self.config = config + self._server: Optional[HTTPServer] = None + self._ssl_enabled = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + ssl_object = transport.get_extra_info("ssl_object") + if ssl_object is not None: + self._ssl_enabled = True + protocol = ssl_object.selected_alpn_protocol() + else: + protocol = "http/1.1" + + if protocol == "h2": + self._server = H2Server(self.app, self.loop, self.config, transport) + else: + self._server = H11Server(self.app, self.loop, self.config, transport) + + def connection_lost(self, exception: Exception) -> None: + self._server.connection_lost(exception) + + def data_received(self, data: bytes) -> None: + try: + self._server.data_received(data) + except WebsocketProtocolRequired as error: + self._server = WebsocketServer( + self.app, + self.loop, + self.config, + self._server.transport, + upgrade_request=error.request, + ) + except H2CProtocolRequired as error: + self._server = H2Server( + self.app, + self.loop, + self.config, + self._server.transport, + upgrade_request=error.request, + ) + except H2ProtocolAssumed as error: + self._server = H2Server( + self.app, self.loop, self.config, self._server.transport, received_data=error.data + ) + + def eof_received(self) -> bool: + if self._ssl_enabled: + # Returning anything other than False has no affect under + # SSL, and just raises an annoying warning. + return False + return self._server.eof_received() + + def pause_writing(self) -> None: + self._server.pause_writing() + + def resume_writing(self) -> None: + self._server.resume_writing() + + +async def _windows_signal_support() -> None: + # See https://bugs.python.org/issue23057, to catch signals on + # Windows it is necessary for an IO event to happen periodically. + while True: + await asyncio.sleep(1) + + +def run_single( + app: Type[ASGIFramework], config: Config, *, loop: asyncio.AbstractEventLoop +) -> None: + """Create a server to run the app on given the options. + + This has been deprecated in favour of serve from + hypercorn.asyncio it will be removed in 0.6.0. + + Arguments: + app: The ASGI Framework to run. + config: The configuration that defines the server. + loop: Asyncio loop to create the server in, if None, take default one. 
+ + """ + warnings.warn("See `serve` from hypercorn.asyncio", DeprecationWarning) + if loop is None: + warnings.warn("Event loop is not specified, this can cause unexpected errors") + loop = asyncio.get_event_loop() + loop.run_until_complete(worker_serve(app, config)) + + +async def worker_serve( + app: Type[ASGIFramework], + config: Config, + *, + sockets: Optional[List[socket]] = None, + shutdown_event: Optional[EventType] = None, +) -> None: + lifespan = Lifespan(app, config) + lifespan_task = asyncio.ensure_future(lifespan.handle_lifespan()) + + await lifespan.wait_for_startup() + + ssl_context = config.create_ssl_context() + + if sockets is None: + sockets = config.create_sockets() + + loop = asyncio.get_event_loop() + tasks = [] + if platform.system() == "Windows": + tasks.append(loop.create_task(_windows_signal_support())) + else: + signal_event = asyncio.Event() + + def _signal_handler(*_: Any) -> None: # noqa: N803 + signal_event.set() + + try: + loop.add_signal_handler(signal.SIGINT, _signal_handler) + loop.add_signal_handler(signal.SIGTERM, _signal_handler) + except AttributeError: + pass + + async def _check_signal() -> None: + await signal_event.wait() + raise Shutdown() + + tasks.append(loop.create_task(_check_signal())) + + if shutdown_event is not None: + tasks.append(loop.create_task(check_shutdown(shutdown_event, asyncio.sleep))) + + if config.use_reloader: + tasks.append(loop.create_task(observe_changes(asyncio.sleep))) + + servers = [ + await loop.create_server( # type: ignore + lambda: Server(app, loop, config), backlog=config.backlog, ssl=ssl_context, sock=sock + ) + for sock in sockets + ] + + reload_ = False + try: + gathered_tasks = asyncio.gather(*tasks) + await gathered_tasks + except MustReloadException: + reload_ = True + except (Shutdown, KeyboardInterrupt): + pass + finally: + for server in servers: + server.close() + await server.wait_closed() + # Retrieve the Gathered Tasks Cancelled Exception, to + # prevent a warning that this hasn't been done. 
+ gathered_tasks.exception() + + await lifespan.wait_for_shutdown() + lifespan_task.cancel() + await lifespan_task + + if reload_: + restart() + + +def asyncio_worker( + config: Config, + sockets: Optional[List[socket]] = None, + shutdown_event: Optional[EventType] = None, +) -> None: + app = load_application(config.application_path) + # With Python 3.7 switch to asyncio.run + _run( + worker_serve(app, config, sockets=sockets, shutdown_event=shutdown_event), + debug=config.debug, + ) + + +def uvloop_worker( + config: Config, + sockets: Optional[List[socket]] = None, + shutdown_event: Optional[EventType] = None, +) -> None: + try: + import uvloop + except ImportError as error: + raise Exception("uvloop is not installed") from error + else: + asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) + + app = load_application(config.application_path) + # With Python 3.7 use asyncio.run + _run( + worker_serve(app, config, sockets=sockets, shutdown_event=shutdown_event), + debug=config.debug, + ) + + +def _run(main: Coroutine, *, debug: bool = False) -> None: + loop = asyncio.new_event_loop() + try: + asyncio.set_event_loop(loop) + loop.set_debug(debug) + loop.set_exception_handler(_exception_handler) + loop.run_until_complete(main) + finally: + try: + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + finally: + asyncio.set_event_loop(None) + loop.close() + + +def _cancel_all_tasks(loop: asyncio.AbstractEventLoop) -> None: + tasks = [task for task in asyncio.Task.all_tasks(loop) if not task.done()] + if not tasks: + return + + for task in tasks: + task.cancel() + loop.run_until_complete(asyncio.gather(*tasks, loop=loop, return_exceptions=True)) + + for task in tasks: + if not task.cancelled() and task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during shutdown", + "exception": task.exception(), + "task": task, + } + ) + + +def _exception_handler(loop: asyncio.AbstractEventLoop, context: dict) -> None: + exception = context.get("exception") + if isinstance(exception, ssl.SSLError): + pass # Handshake failure + else: + loop.default_exception_handler(context) diff --git a/py3/lib/python3.6/site-packages/hypercorn/asyncio/wsproto.py b/py3/lib/python3.6/site-packages/hypercorn/asyncio/wsproto.py new file mode 100644 index 0000000..a8fd3a1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/asyncio/wsproto.py @@ -0,0 +1,104 @@ +import asyncio +from typing import Optional, Type + +import h11 +from wsproto import ConnectionType, WSConnection +from wsproto.connection import ConnectionState +from wsproto.events import CloseConnection, Event, Message, Ping, Request +from wsproto.frame_protocol import CloseReason + +from .base import HTTPServer +from ..asgi.utils import ASGIWebsocketState, FrameTooLarge, WebsocketBuffer +from ..asgi.wsproto import WebsocketMixin +from ..config import Config +from ..typing import ASGIFramework + + +class WebsocketServer(HTTPServer, WebsocketMixin): + def __init__( + self, + app: Type[ASGIFramework], + loop: asyncio.AbstractEventLoop, + config: Config, + transport: asyncio.BaseTransport, + *, + upgrade_request: Optional[h11.Request] = None, + ) -> None: + super().__init__(loop, config, transport, "wsproto") + self.stop_keep_alive_timeout() + self.app = app + self.connection = WSConnection(ConnectionType.SERVER) + + self.app_queue: asyncio.Queue = asyncio.Queue() + self.response: Optional[dict] = None + self.scope: Optional[dict] = None + self.state = ASGIWebsocketState.HANDSHAKE + 
self.task: Optional[asyncio.Future] = None + + self.buffer = WebsocketBuffer(self.config.websocket_max_message_size) + + if upgrade_request is not None: + self.connection.initiate_upgrade_connection( + upgrade_request.headers, upgrade_request.target + ) + self.handle_events() + + def connection_lost(self, error: Optional[Exception]) -> None: + if error is not None: + self.app_queue.put_nowait({"type": "websocket.disconnect"}) + + def eof_received(self) -> bool: + self.data_received(None) + return True + + def data_received(self, data: Optional[bytes]) -> None: + self.connection.receive_data(data) + self.handle_events() + + def handle_events(self) -> None: + for event in self.connection.events(): + if isinstance(event, Request): + self.task = self.loop.create_task(self.handle_websocket(event)) + self.task.add_done_callback(self.maybe_close) + elif isinstance(event, Message): + try: + self.buffer.extend(event) + except FrameTooLarge: + self.write( + self.connection.send(CloseConnection(code=CloseReason.MESSAGE_TOO_BIG)) + ) + self.app_queue.put_nowait({"type": "websocket.disconnect"}) + self.close() + break + + if event.message_finished: + self.app_queue.put_nowait(self.buffer.to_message()) + self.buffer.clear() + elif isinstance(event, Ping): + self.write(self.connection.send(event.response())) + elif isinstance(event, CloseConnection): + if self.connection.state == ConnectionState.REMOTE_CLOSING: + self.write(self.connection.send(event.response())) + self.app_queue.put_nowait({"type": "websocket.disconnect"}) + self.close() + break + + def maybe_close(self, future: asyncio.Future) -> None: + # Close the connection iff a HTTP response was sent + if self.state == ASGIWebsocketState.HTTPCLOSED: + self.close() + + async def asend(self, event: Event) -> None: + await self.drain() + self.write(self.connection.send(event)) + + async def asgi_put(self, message: dict) -> None: + await self.app_queue.put(message) + + async def asgi_receive(self) -> dict: + """Called by the ASGI instance to receive a message.""" + return await self.app_queue.get() + + @property + def scheme(self) -> str: + return "wss" if self.ssl_info is not None else "ws" diff --git a/py3/lib/python3.6/site-packages/hypercorn/config.py b/py3/lib/python3.6/site-packages/hypercorn/config.py new file mode 100644 index 0000000..bd7bb4b --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/config.py @@ -0,0 +1,319 @@ +import importlib +import importlib.util +import logging +import os +import socket +import ssl +import stat +import sys +import types +import warnings +from ssl import SSLContext, VerifyFlags, VerifyMode # type: ignore +from typing import Any, AnyStr, Dict, List, Mapping, Optional, Type, Union + +import pytoml + +from .logging import AccessLogger + +BYTES = 1 +OCTETS = 1 +SECONDS = 1.0 + +FilePath = Union[AnyStr, os.PathLike] + + +class Config: + + _access_logger: Optional[AccessLogger] = None + _error_log_target: Optional[str] = None + + access_log_format = "%(h)s %(r)s %(s)s %(b)s %(D)s" + access_log_target: Optional[str] = None + access_logger_class = AccessLogger + alpn_protocols = ["h2", "http/1.1"] + application_path: str + backlog = 100 + bind = ["127.0.0.1:8000"] + ca_certs: Optional[str] = None + certfile: Optional[str] = None + ciphers: str = "ECDHE+AESGCM" + debug = False + error_logger: Optional[logging.Logger] = None + h11_max_incomplete_size = 16 * 1024 * BYTES + h2_max_concurrent_streams = 100 + h2_max_header_list_size = 2 ** 16 + h2_max_inbound_frame_size = 2 ** 14 * OCTETS + keep_alive_timeout = 5 * 
SECONDS + keyfile: Optional[str] = None + pid_path: Optional[str] = None + root_path = "" + startup_timeout = 60 * SECONDS + shutdown_timeout = 60 * SECONDS + use_reloader = False + verify_flags: Optional[VerifyFlags] = None + verify_mode: Optional[VerifyMode] = None + websocket_max_message_size = 16 * 1024 * 1024 * BYTES + worker_class = "asyncio" + workers = 1 + + def set_cert_reqs(self, value: int) -> None: + warnings.warn("Please use verify_mode instead", Warning) + self.verify_mode = VerifyMode(value) + + cert_reqs = property(None, set_cert_reqs) + + @property + def host(self) -> str: + # Remove in 0.6.0 + warnings.warn("host is deprecated, please use bind instead", DeprecationWarning) + host, _ = self.bind[0].rsplit(":") + return host + + @host.setter + def host(self, value: str) -> None: + # Remove in 0.6.0 + warnings.warn("host is deprecated, please use bind instead", DeprecationWarning) + if self.bind: + host, port = self.bind[0].rsplit(":") + else: + port = "8000" + host = value + self.bind = [f"{host}:{port}"] + + @property + def port(self) -> int: + # Remove in 0.6.0 + warnings.warn("port is deprecated, please use bind instead", DeprecationWarning) + _, port = self.bind[0].rsplit(":") + return int(port) + + @port.setter + def port(self, value: int) -> None: + # Remove in 0.6.0 + warnings.warn("port is deprecated, please use bind instead", DeprecationWarning) + if self.bind: + host, port = self.bind[0].rsplit(":") + else: + host = "127.0.0.1" + port = str(value) + self.bind = [f"{host}:{port}"] + + def _set_file_descriptor(self, value: int) -> None: + # Remove in 0.6.0 + warnings.warn("file_descriptor is deprecated, please use bind instead", DeprecationWarning) + self.bind = [f"fd://{value}"] + + file_descriptor = property(None, _set_file_descriptor) + + def _set_unix_domain(self, value: str) -> None: + # Remove in 0.6.0 + warnings.warn("unix_domain is deprecated, please use bind instead", DeprecationWarning) + self.bind = [f"unix:{value}"] + + unix_domain = property(None, _set_unix_domain) + + @property + def access_logger(self) -> AccessLogger: + if self._access_logger is None: + self._access_logger = self.access_logger_class( + self.access_log_format, self.access_log_target + ) + return self._access_logger + + @access_logger.setter + def access_logger(self, value: logging.Logger) -> None: + self._access_logger = self.access_logger_class(self.access_log_format, value) + + @property + def error_log_target(self) -> Optional[str]: + return self._error_log_target + + @error_log_target.setter + def error_log_target(self, value: Optional[str]) -> None: + self._error_log_target = value + if self.error_log_target is not None: + self.error_logger = logging.getLogger("hypercorn.error") + if self.error_log_target == "-": + self.error_logger.addHandler(logging.StreamHandler(sys.stderr)) + else: + self.error_logger.addHandler(logging.FileHandler(self.error_log_target)) + self.error_logger.setLevel(logging.INFO) + + def create_ssl_context(self) -> Optional[SSLContext]: + if not self.ssl_enabled: + return None + + context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + context.set_ciphers(self.ciphers) + cipher_opts = 0 + for attr in ["OP_NO_SSLv2", "OP_NO_SSLv3", "OP_NO_TLSv1", "OP_NO_TLSv1_1"]: + if hasattr(ssl, attr): # To be future proof + cipher_opts |= getattr(ssl, attr) + context.options |= cipher_opts # RFC 7540 Section 9.2: MUST be TLS >=1.2 + context.options |= ssl.OP_NO_COMPRESSION # RFC 7540 Section 9.2.1: MUST disable compression + 
context.set_alpn_protocols(self.alpn_protocols) + try: + context.set_npn_protocols(self.alpn_protocols) + except NotImplementedError: + pass # NPN is not necessarily available + + context.load_cert_chain(certfile=self.certfile, keyfile=self.keyfile) + if self.ca_certs is not None: + context.load_verify_locations(self.ca_certs) + if self.verify_mode is not None: + context.verify_mode = self.verify_mode + if self.verify_flags is not None: + context.verify_flags = self.verify_flags + + return context + + @property + def ssl_enabled(self) -> bool: + return self.certfile is not None and self.keyfile is not None + + def create_sockets(self) -> List[socket.socket]: + sockets: List[socket.socket] = [] + for bind in self.bind: + binding: Any = None + if bind.startswith("unix:"): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + binding = bind[5:] + try: + if stat.S_ISSOCK(os.stat(binding).st_mode): + os.remove(binding) + except FileNotFoundError: + pass + elif bind.startswith("fd://"): + sock = socket.fromfd(int(bind[5:]), socket.AF_UNIX, socket.SOCK_STREAM) + else: + try: + value = bind.rsplit(":", 1) + host, port = value[0], int(value[1]) + except (ValueError, IndexError): + host, port = bind, 8000 + sock = socket.socket( + socket.AF_INET6 if ":" in host else socket.AF_INET, socket.SOCK_STREAM + ) + if self.workers > 1: + try: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) # type: ignore + except AttributeError: + pass + binding = (host, port) + + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + if binding is not None: + sock.bind(binding) + sock.setblocking(False) + try: + sock.set_inheritable(True) # type: ignore + except AttributeError: + pass + sockets.append(sock) + return sockets + + @classmethod + def from_mapping( + cls: Type["Config"], mapping: Optional[Mapping[str, Any]] = None, **kwargs: Any + ) -> "Config": + """Create a configuration from a mapping. + + This allows either a mapping to be directly passed or as + keyword arguments, for example, + + .. code-block:: python + + config = {'keep_alive_timeout': 10} + Config.from_mapping(config) + Config.form_mapping(keep_alive_timeout=10) + + Arguments: + mapping: Optionally a mapping object. + kwargs: Optionally a collection of keyword arguments to + form a mapping. + """ + mappings: Dict[str, Any] = {} + if mapping is not None: + mappings.update(mapping) + mappings.update(kwargs) + config = cls() + for key, value in mappings.items(): + try: + setattr(config, key, value) + except AttributeError: + pass + + return config + + @classmethod + def from_pyfile(cls: Type["Config"], filename: FilePath) -> "Config": + """Create a configuration from a Python file. + + .. code-block:: python + + Config.from_pyfile('hypercorn_config.py') + + Arguments: + filename: The filename which gives the path to the file. + """ + file_path = os.fspath(filename) + spec = importlib.util.spec_from_file_location("module.name", file_path) # type: ignore + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) # type: ignore + return cls.from_object(module) + + @classmethod + def from_toml(cls: Type["Config"], filename: FilePath) -> "Config": + """Load the configuration values from a TOML formatted file. + + This allows configuration to be loaded as so + + .. code-block:: python + + Config.from_toml('config.toml') + + Arguments: + filename: The filename which gives the path to the file. 
+ """ + file_path = os.fspath(filename) + with open(file_path) as file_: + data = pytoml.load(file_) + return cls.from_mapping(data) + + @classmethod + def from_object(cls: Type["Config"], instance: Union[object, str]) -> "Config": + """Create a configuration from a Python object. + + This can be used to reference modules or objects within + modules for example, + + .. code-block:: python + + Config.from_object('module') + Config.from_object('module.instance') + from module import instance + Config.from_object(instance) + + are valid. + + Arguments: + instance: Either a str referencing a python object or the + object itself. + + """ + if isinstance(instance, str): + try: + path, config = instance.rsplit(".", 1) + except ValueError: + path = instance + instance = importlib.import_module(instance) + else: + module = importlib.import_module(path) + instance = getattr(module, config) + + mapping = { + key: getattr(instance, key) + for key in dir(instance) + if not isinstance(getattr(instance, key), types.ModuleType) + } + return cls.from_mapping(mapping) diff --git a/py3/lib/python3.6/site-packages/hypercorn/logging.py b/py3/lib/python3.6/site-packages/hypercorn/logging.py new file mode 100644 index 0000000..50200fa --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/logging.py @@ -0,0 +1,82 @@ +import logging +import os +import sys +import time +from typing import Any, Optional, Union + + +class AccessLogger: + def __init__(self, log_format: str, target: Union[logging.Logger, str, None]) -> None: + self.logger: Optional[logging.Logger] = None + self.log_format = log_format + if isinstance(target, logging.Logger): + self.logger = target + elif target is not None: + self.logger = logging.getLogger("hypercorn.access") + self.logger.handlers = [] + if target == "-": + self.logger.addHandler(logging.StreamHandler(sys.stdout)) + else: + self.logger.addHandler(logging.FileHandler(target)) + self.logger.setLevel(logging.INFO) + + def access(self, request: dict, response: dict, request_time: float) -> None: + if self.logger is not None: + self.logger.info(self.log_format, AccessLogAtoms(request, response, request_time)) + + def __getattr__(self, name: str) -> Any: + if self.logger is None: + return lambda *_: None + else: + return getattr(self.logger, name) + + +class AccessLogAtoms(dict): + def __init__(self, request: dict, response: dict, request_time: float) -> None: + for name, value in request["headers"]: + self[f"{{{name.decode().lower()}}}i"] = value.decode() + for name, value in response["headers"]: + self[f"{{{name.decode().lower()}}}o"] = value.decode() + for name, value in os.environ.items(): + self[f"{{{name.lower()}}}e"] = value + protocol = request.get("http_version", "ws") + client = request.get("client") + if client is None: + remote_addr = None + elif len(client) == 2: + remote_addr = f"{client[0]}:{client[1]}" + elif len(client) == 1: + remote_addr = client[0] + else: # make sure not to throw UnboundLocalError + remote_addr = f"" + method = request.get("method", "GET") + self.update( + { + "h": remote_addr, + "l": "-", + "t": time.strftime("[%d/%b/%Y:%H:%M:%S %z]"), + "r": f"{method} {request['path']} {protocol}", + "s": response["status"], + "m": method, + "U": request["path"], + "q": request["query_string"].decode(), + "H": protocol, + "b": self["{Content-Length}o"], + "B": self["{Content-Length}o"], + "f": self["{Referer}i"], + "a": self["{User-Agent}i"], + "T": int(request_time), + "D": int(request_time * 1_000_000), + "L": f"{request_time:.6f}", + "p": 
f"<{os.getpid()}>", + } + ) + + def __getitem__(self, key: str) -> str: + try: + if key.startswith("{"): + return super().__getitem__(key.lower()) + else: + return super().__getitem__(key) + except KeyError: + return "-" diff --git a/py3/lib/python3.6/site-packages/hypercorn/py.typed b/py3/lib/python3.6/site-packages/hypercorn/py.typed new file mode 100644 index 0000000..f5642f7 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/py.typed @@ -0,0 +1 @@ +Marker diff --git a/py3/lib/python3.6/site-packages/hypercorn/run.py b/py3/lib/python3.6/site-packages/hypercorn/run.py new file mode 100644 index 0000000..0eaeec3 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/run.py @@ -0,0 +1,78 @@ +import platform +import random +import signal +import time +from multiprocessing import Event, Process +from typing import Any + +from .config import Config +from .typing import WorkerFunc +from .utils import write_pid_file + + +def run(config: Config) -> None: + if config.pid_path is not None: + write_pid_file(config.pid_path) + + worker_func: WorkerFunc + if config.worker_class == "asyncio": + from .asyncio.run import asyncio_worker + + worker_func = asyncio_worker + elif config.worker_class == "uvloop": + from .asyncio.run import uvloop_worker + + worker_func = uvloop_worker + elif config.worker_class == "trio": + from .trio.run import trio_worker + + worker_func = trio_worker + else: + raise ValueError(f"No worker of class {config.worker_class} exists") + + if config.workers == 1: + worker_func(config) + else: + run_multiple(config, worker_func) + + +def run_multiple(config: Config, worker_func: WorkerFunc) -> None: + if config.use_reloader: + raise RuntimeError("Reloader can only be used with a single worker") + + sockets = config.create_sockets() + + processes = [] + + # Ignore SIGINT before creating the processes, so that they + # inherit the signal handling. This means that the shutdown + # function controls the shutdown. + signal.signal(signal.SIGINT, signal.SIG_IGN) + + shutdown_event = Event() + + for _ in range(config.workers): + process = Process( + target=worker_func, + kwargs={"config": config, "shutdown_event": shutdown_event, "sockets": sockets}, + ) + process.daemon = True + process.start() + processes.append(process) + if platform.system() == "Windows": + time.sleep(0.1 * random.random()) + + def shutdown(*args: Any) -> None: + shutdown_event.set() + + for signal_name in {"SIGINT", "SIGTERM", "SIGBREAK"}: + if hasattr(signal, signal_name): + signal.signal(getattr(signal, signal_name), shutdown) + + for process in processes: + process.join() + for process in processes: + process.terminate() + + for sock in sockets: + sock.close() diff --git a/py3/lib/python3.6/site-packages/hypercorn/trio/__init__.py b/py3/lib/python3.6/site-packages/hypercorn/trio/__init__.py new file mode 100644 index 0000000..facf57c --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/trio/__init__.py @@ -0,0 +1,38 @@ +import warnings +from typing import Type + +import trio + +from .run import worker_serve +from ..config import Config +from ..typing import ASGIFramework + + +async def serve( + app: Type[ASGIFramework], + config: Config, + *, + task_status: trio._core._run._TaskStatus = trio.TASK_STATUS_IGNORED, +) -> None: + """Serve an ASGI framework app given the config. + + This allows for a programmatic way to serve an ASGI framework, it + can be used via, + + .. 
code-block:: python + + trio.run(partial(serve, app, config)) + + It is assumed that the event-loop is configured before calling + this function, therefore configuration values that relate to loop + setup or process setup are ignored. + + """ + if config.debug: + warnings.warn("The config `debug` has no affect when using serve", Warning) + if config.workers != 1: + warnings.warn("The config `workers` has no affect when using serve", Warning) + if config.worker_class != "asyncio": + warnings.warn("The config `worker_class` has no affect when using serve", Warning) + + await worker_serve(app, config, task_status=task_status) diff --git a/py3/lib/python3.6/site-packages/hypercorn/trio/base.py b/py3/lib/python3.6/site-packages/hypercorn/trio/base.py new file mode 100644 index 0000000..53c115d --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/trio/base.py @@ -0,0 +1,31 @@ +from typing import List, Tuple + +import trio + +from ..utils import parse_socket_addr, response_headers + + +class HTTPServer: + def __init__(self, stream: trio.abc.Stream, protocol: str) -> None: + self.stream = stream + self.protocol = protocol + + self._is_ssl = isinstance(self.stream, trio.SSLStream) + if self._is_ssl: + socket = self.stream.transport_stream.socket + else: + socket = self.stream.socket + self.client = parse_socket_addr(socket.family, socket.getpeername()) + self.server = parse_socket_addr(socket.family, socket.getsockname()) + + async def aclose(self) -> None: + try: + await self.stream.send_eof() + except (trio.BrokenResourceError, AttributeError): + # They're already gone, nothing to do + # Or it is a SSL stream + return + await self.stream.aclose() + + def response_headers(self) -> List[Tuple[bytes, bytes]]: + return response_headers(self.protocol) diff --git a/py3/lib/python3.6/site-packages/hypercorn/trio/h11.py b/py3/lib/python3.6/site-packages/hypercorn/trio/h11.py new file mode 100644 index 0000000..e1a01c9 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/trio/h11.py @@ -0,0 +1,142 @@ +from typing import Optional, Type + +import h11 +import trio + +from .base import HTTPServer +from ..asgi.h11 import H11Mixin +from ..asgi.run import H2CProtocolRequired, WrongProtocolError +from ..asgi.utils import ASGIHTTPState +from ..config import Config +from ..typing import ASGIFramework, H11SendableEvent + +MAX_RECV = 2 ** 16 + + +class MustCloseError(Exception): + pass + + +class H11Server(HTTPServer, H11Mixin): + def __init__(self, app: Type[ASGIFramework], config: Config, stream: trio.abc.Stream) -> None: + super().__init__(stream, "h11") + self.app = app + self.config = config + self.connection = h11.Connection( + h11.SERVER, max_incomplete_event_size=self.config.h11_max_incomplete_size + ) + self.response: Optional[dict] = None + self.scope: Optional[dict] = None + self.state = ASGIHTTPState.REQUEST + self.app_send_channel, self.app_receive_channel = trio.open_memory_channel(10) + + async def handle_connection(self) -> None: + try: + # Loop over the requests in order of receipt (either + # pipelined or due to keep-alive). 
+ while True: + with trio.fail_after(self.config.keep_alive_timeout): + request = await self.read_request() + self.raise_if_upgrade(request, self.connection.trailing_data[0]) + + async with trio.open_nursery() as nursery: + nursery.start_soon(self.handle_request, request) + await self.read_body() + + await self.recycle_or_close() + except (trio.BrokenResourceError, trio.ClosedResourceError): + await self.asgi_put({"type": "http.disconnect"}) + await self.aclose() + except (trio.TooSlowError, MustCloseError): + await self.aclose() + except H2CProtocolRequired as error: + await self.asend( + h11.InformationalResponse( + status_code=101, headers=[(b"upgrade", b"h2c")] + self.response_headers() + ) + ) + raise error + except WrongProtocolError: + raise # Do not close the connection + + async def read_request(self) -> h11.Request: + while True: + try: + event = self.connection.next_event() + except h11.RemoteProtocolError: + await self.send_error() + raise MustCloseError() + else: + if event is h11.NEED_DATA: + await self.read_data() + elif isinstance(event, h11.Request): + return event + else: + raise MustCloseError() + + async def read_body(self) -> None: + while True: + try: + event = self.connection.next_event() + except h11.RemoteProtocolError: + await self.send_error() + await self.asgi_put({"type": "http.disconnect"}) + raise MustCloseError() + else: + if event is h11.NEED_DATA: + await self.read_data() + elif isinstance(event, h11.EndOfMessage): + await self.asgi_put({"type": "http.request", "body": b"", "more_body": False}) + return + elif isinstance(event, h11.Data): + await self.asgi_put( + {"type": "http.request", "body": event.data, "more_body": True} + ) + elif isinstance(event, h11.ConnectionClosed) or event is h11.PAUSED: + break + + async def recycle_or_close(self) -> None: + if self.connection.our_state is h11.DONE: + await self.app_send_channel.aclose() + await self.app_receive_channel.aclose() + self.connection.start_next_cycle() + self.app_send_channel, self.app_receive_channel = trio.open_memory_channel(10) + self.response = None + self.scope = None + self.state = ASGIHTTPState.REQUEST + else: + raise MustCloseError() + + async def asend(self, event: H11SendableEvent) -> None: + data = self.connection.send(event) + try: + await self.stream.send_all(data) + except trio.BrokenResourceError: + pass + + async def close(self) -> None: + await self.app_send_channel.aclose() + await self.app_receive_channel.aclose() + await super().aclose() + + async def read_data(self) -> None: + if self.connection.they_are_waiting_for_100_continue: + await self.asend( + h11.InformationalResponse(status_code=100, headers=self.response_headers()) + ) + data = await self.stream.receive_some(MAX_RECV) + self.connection.receive_data(data) + + async def send_error(self) -> None: + await self.asend(self.error_response(400)) + await self.asend(h11.EndOfMessage()) + + async def asgi_put(self, message: dict) -> None: + await self.app_send_channel.send(message) + + async def asgi_receive(self) -> dict: + return await self.app_receive_channel.receive() + + @property + def scheme(self) -> str: + return "https" if self._is_ssl else "http" diff --git a/py3/lib/python3.6/site-packages/hypercorn/trio/h2.py b/py3/lib/python3.6/site-packages/hypercorn/trio/h2.py new file mode 100644 index 0000000..60f43a1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/trio/h2.py @@ -0,0 +1,357 @@ +from functools import partial +from itertools import chain +from typing import Callable, Dict, Iterable, Optional, 
Tuple, Type + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h11 +import trio +import wsproto.connection +import wsproto.events + +from .base import HTTPServer +from ..asgi.h2 import ( + Data, + EndStream, + H2Event, + H2HTTPStreamMixin, + H2WebsocketStreamMixin, + Response, + ServerPush, +) +from ..asgi.utils import ASGIHTTPState, ASGIWebsocketState +from ..config import Config +from ..typing import ASGIFramework, H2AsyncStream + +MAX_RECV = 2 ** 16 + + +class MustCloseError(Exception): + pass + + +class H2HTTPStream(H2HTTPStreamMixin): + """A HTTP Stream.""" + + def __init__(self, app: Type[ASGIFramework], config: Config, asend: Callable) -> None: + self.app = app + self.config = config + self.response: Optional[dict] = None + self.scope: Optional[dict] = None + self.state = ASGIHTTPState.REQUEST + + self.asend = asend # type: ignore + self.app_send_channel, self.app_receive_channel = trio.open_memory_channel(10) + + async def data_received(self, data: bytes) -> None: + await self.app_send_channel.send({"type": "http.request", "body": data, "more_body": True}) + + async def ended(self) -> None: + await self.app_send_channel.send({"type": "http.request", "body": b"", "more_body": False}) + + async def reset(self) -> None: + await self.app_send_channel.send({"type": "http.disconnect"}) + + async def close(self) -> None: + await self.app_send_channel.send({"type": "http.disconnect"}) + + async def asgi_receive(self) -> dict: + return await self.app_receive_channel.receive() + + +class H2WebsocketStream(H2WebsocketStreamMixin): + """A Websocket Stream.""" + + def __init__(self, app: Type[ASGIFramework], config: Config, asend: Callable) -> None: + self.app = app + self.config = config + self.response: Optional[dict] = None + self.scope: Optional[dict] = None + self.state = ASGIWebsocketState.CONNECTED + self.connection: Optional[wsproto.connection.Connection] = None + + self.asend = asend # type: ignore + self.app_send_channel, self.app_receive_channel = trio.open_memory_channel(10) + + async def data_received(self, data: bytes) -> None: + self.connection.receive_data(data) + for event in self.connection.events(): + if isinstance(event, wsproto.events.TextMessage): + await self.app_send_channel.send({"type": "websocket.receive", "text": event.data}) + elif isinstance(event, wsproto.events.BytesMessage): + await self.app_send_channel.send({"type": "websocket.receive", "bytes": event.data}) + elif isinstance(event, wsproto.events.Ping): + await self.asend(Data(self.connection.send(event.response()))) + elif isinstance(event, wsproto.events.CloseConnection): + if self.connection.state == wsproto.connection.ConnectionState.REMOTE_CLOSING: + await self.asend(Data(self.connection.send(event.response()))) + await self.app_send_channel.send({"type": "websocket.disconnect"}) + break + + async def ended(self) -> None: + await self.app_send_channel.send({"type": "websocket.disconnect"}) + + async def reset(self) -> None: + await self.app_send_channel.send({"type": "websocket.disconnect"}) + + async def close(self) -> None: + await self.app_send_channel.send({"type": "websocket.disconnect"}) + + async def asgi_put(self, message: dict) -> None: + await self.app_send_channel.send(message) + + async def asgi_receive(self) -> dict: + return await self.app_receive_channel.receive() + + +class H2Server(HTTPServer): + def __init__( + self, + app: Type[ASGIFramework], + config: Config, + stream: trio.abc.Stream, + *, + upgrade_request: Optional[h11.Request] = None, + 
received_data: Optional[bytes] = None, + ) -> None: + super().__init__(stream, "h2") + self.app = app + self.config = config + + self.streams: Dict[int, H2AsyncStream] = {} # type: ignore + self.flow_control: Dict[int, trio.Event] = {} + self.send_lock = trio.Lock() + self.upgrade_request = upgrade_request + self.received_data = received_data + + self.connection = h2.connection.H2Connection( + config=h2.config.H2Configuration(client_side=False, header_encoding=None) + ) + self.connection.DEFAULT_MAX_INBOUND_FRAME_SIZE = config.h2_max_inbound_frame_size + self.connection.local_settings = h2.settings.Settings( + client=False, + initial_values={ + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: config.h2_max_concurrent_streams, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: config.h2_max_header_list_size, + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL: 1, + }, + ) + + async def initiate(self) -> None: + if self.upgrade_request is None: + self.connection.initiate_connection() + if self.received_data: + await self.process_data(self.received_data) + else: + settings = "" + headers = [] + for name, value in self.upgrade_request.headers: + if name.lower() == b"http2-settings": + settings = value.decode() + elif name.lower() == b"host": + headers.append((b":authority", value)) + headers.append((name, value)) + headers.append((b":method", self.upgrade_request.method)) + headers.append((b":path", self.upgrade_request.target)) + self.connection.initiate_upgrade_connection(settings) + event = h2.events.RequestReceived() + event.stream_id = 1 + event.headers = headers + await self.create_stream(event, complete=True) + await self.send() + + async def create_stream( + self, event: h2.events.RequestReceived, *, complete: bool = False + ) -> None: + method: str + for name, value in event.headers: + if name == b":method": + method = value.decode("ascii").upper() + if method == "CONNECT": + self.streams[event.stream_id] = H2WebsocketStream( + self.app, self.config, partial(self.asend, event.stream_id) + ) + else: + self.streams[event.stream_id] = H2HTTPStream( + self.app, self.config, partial(self.asend, event.stream_id) + ) + if complete: + await self.streams[event.stream_id].ended() + self.nursery.start_soon(self.handle_request, event) + + async def handle_request(self, event: h2.events.RequestReceived) -> None: + await self.streams[event.stream_id].handle_request( + event, self.scheme, self.client, self.server + ) + if ( + self.connection.state_machine.state is not h2.connection.ConnectionState.CLOSED + and event.stream_id in self.connection.streams + and not self.connection.streams[event.stream_id].closed + ): + # The connection is not closed and there has been an error + # preventing the stream from closing correctly. 
+ self.connection.reset_stream(event.stream_id) + await self.streams[event.stream_id].close() + del self.streams[event.stream_id] + + async def handle_connection(self) -> None: + try: + async with trio.open_nursery() as nursery: + self.nursery = nursery + await self.initiate() + await self.read_data() + except trio.TooSlowError: + self.connection.close_connection() + await self.send() + except MustCloseError: + await self.send() + except (trio.BrokenResourceError, trio.ClosedResourceError): + pass + finally: + for stream in self.streams.values(): + await stream.close() + await self.aclose() + + async def read_data(self) -> None: + while True: + try: + with trio.fail_after(self.config.keep_alive_timeout): + data = await self.stream.receive_some(MAX_RECV) + except trio.TooSlowError: + if len(self.streams) == 0: + raise + else: + continue # Keep waiting + if data == b"": + return + await self.process_data(data) + + async def process_data(self, data: bytes) -> None: + try: + events = self.connection.receive_data(data) + except h2.exceptions.ProtocolError: + raise MustCloseError() + else: + for event in events: + if isinstance(event, h2.events.RequestReceived): + await self.create_stream(event) + elif isinstance(event, h2.events.DataReceived): + await self.streams[event.stream_id].data_received(event.data) + self.connection.acknowledge_received_data( + event.flow_controlled_length, event.stream_id + ) + elif isinstance(event, h2.events.StreamReset): + await self.streams[event.stream_id].reset() + elif isinstance(event, h2.events.StreamEnded): + await self.streams[event.stream_id].ended() + elif isinstance(event, h2.events.WindowUpdated): + self.window_updated(event.stream_id) + elif isinstance(event, h2.events.ConnectionTerminated): + raise MustCloseError() + await self.send() + + async def asend(self, stream_id: int, event: H2Event) -> None: + connection_state = self.connection.state_machine.state + stream_state = self.connection.streams[stream_id].state_machine.state + if ( + connection_state == h2.connection.ConnectionState.CLOSED + or stream_state == h2.stream.StreamState.CLOSED + ): + return + if isinstance(event, Response): + self.connection.send_headers( + stream_id, event.headers + self.response_headers() # type: ignore + ) + await self.send() + elif isinstance(event, EndStream): + self.connection.end_stream(stream_id) + await self.send() + elif isinstance(event, Data): + await self.send_data(stream_id, event.data) + elif isinstance(event, ServerPush): + await self.server_push(stream_id, event.path, event.headers) + + async def send_data(self, stream_id: int, data: bytes) -> None: + while True: + while not self.connection.local_flow_control_window(stream_id): + await self.wait_for_flow_control(stream_id) + + chunk_size = min(len(data), self.connection.local_flow_control_window(stream_id)) + chunk_size = min(chunk_size, self.connection.max_outbound_frame_size) + if chunk_size < 1: # Pathological client sending negative window sizes + continue + self.connection.send_data(stream_id, data[:chunk_size]) + await self.send() + data = data[chunk_size:] + if not data: + break + + async def send(self) -> None: + data = self.connection.data_to_send() + if data == b"": + return + async with self.send_lock: + try: + await self.stream.send_all(data) + except trio.BrokenResourceError: + pass + + async def wait_for_flow_control(self, stream_id: int) -> None: + event = trio.Event() + self.flow_control[stream_id] = event + await event.wait() + + def window_updated(self, stream_id: Optional[int]) -> None: 
+ if stream_id is None or stream_id == 0: + # Unblock all streams + stream_ids = list(self.flow_control.keys()) + for stream_id in stream_ids: + event = self.flow_control.pop(stream_id) + event.set() + elif stream_id is not None: + if stream_id in self.flow_control: + event = self.flow_control.pop(stream_id) + event.set() + + async def server_push( + self, stream_id: int, path: str, headers: Iterable[Tuple[bytes, bytes]] + ) -> None: + push_stream_id = self.connection.get_next_available_stream_id() + stream = self.streams[stream_id] + for name, value in stream.scope["headers"]: + if name == b":authority": + authority = value + request_headers = [ + (name, value) + for name, value in chain( + [ + (b":method", b"GET"), + (b":path", path.encode()), + (b":scheme", stream.scope["scheme"].encode()), + (b":authority", authority), + ], + headers, + self.response_headers(), + ) + ] + try: + self.connection.push_stream( + stream_id=stream_id, + promised_stream_id=push_stream_id, + request_headers=request_headers, + ) + except h2.exceptions.ProtocolError: + # Client does not accept push promises or we are trying to + # push on a push promises request. + pass + else: + event = h2.events.RequestReceived() + event.stream_id = push_stream_id + event.headers = request_headers + await self.create_stream(event, complete=True) + + @property + def scheme(self) -> str: + return "https" if self._is_ssl else "http" diff --git a/py3/lib/python3.6/site-packages/hypercorn/trio/lifespan.py b/py3/lib/python3.6/site-packages/hypercorn/trio/lifespan.py new file mode 100644 index 0000000..b6a7a02 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/trio/lifespan.py @@ -0,0 +1,70 @@ +from typing import Type + +import trio + +from ..config import Config +from ..typing import ASGIFramework + + +class UnexpectedMessage(Exception): + pass + + +class Lifespan: + def __init__(self, app: Type[ASGIFramework], config: Config) -> None: + self.app = app + self.config = config + self.startup = trio.Event() + self.shutdown = trio.Event() + self.app_send_channel, self.app_receive_channel = trio.open_memory_channel(10) + self.supported = True + + async def handle_lifespan( + self, *, task_status: trio._core._run._TaskStatus = trio.TASK_STATUS_IGNORED + ) -> None: + task_status.started() + scope = {"type": "lifespan"} + try: + asgi_instance = self.app(scope) + except Exception: + self.supported = False + if self.config.error_logger is not None: + self.config.error_logger.warning( + "ASGI Framework Lifespan error, continuing without Lifespan support" + ) + else: + try: + await asgi_instance(self.asgi_receive, self.asgi_send) + except Exception: + if self.config.error_logger is not None: + self.config.error_logger.exception("Error in ASGI Framework") + finally: + await self.app_send_channel.aclose() + await self.app_receive_channel.aclose() + + async def wait_for_startup(self) -> None: + if not self.supported: + return + + await self.app_send_channel.send({"type": "lifespan.startup"}) + with trio.fail_after(self.config.startup_timeout): + await self.startup.wait() + + async def wait_for_shutdown(self) -> None: + if not self.supported: + return + + await self.app_send_channel.send({"type": "lifespan.shutdown"}) + with trio.fail_after(self.config.shutdown_timeout): + await self.shutdown.wait() + + async def asgi_receive(self) -> dict: + return await self.app_receive_channel.receive() + + async def asgi_send(self, message: dict) -> None: + if message["type"] == "lifespan.startup.complete": + self.startup.set() + elif message["type"] 
== "lifespan.shutdown.complete": + self.shutdown.set() + else: + raise UnexpectedMessage(message["type"]) diff --git a/py3/lib/python3.6/site-packages/hypercorn/trio/run.py b/py3/lib/python3.6/site-packages/hypercorn/trio/run.py new file mode 100644 index 0000000..d936cdc --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/trio/run.py @@ -0,0 +1,116 @@ +from functools import partial +from multiprocessing.synchronize import Event as EventType +from socket import socket +from typing import List, Optional, Type + +import trio + +from .h2 import H2Server +from .h11 import H11Server +from .lifespan import Lifespan +from .wsproto import WebsocketServer +from ..asgi.run import H2CProtocolRequired, H2ProtocolAssumed, WebsocketProtocolRequired +from ..config import Config +from ..typing import ASGIFramework +from ..utils import ( + check_shutdown, + load_application, + MustReloadException, + observe_changes, + restart, + Shutdown, +) + + +async def serve_stream(app: Type[ASGIFramework], config: Config, stream: trio.abc.Stream) -> None: + if config.ssl_enabled: + try: + await stream.do_handshake() + except trio.BrokenResourceError: + return # Handshake failed + selected_protocol = stream.selected_alpn_protocol() + else: + selected_protocol = "http/1.1" + + if selected_protocol == "h2": + protocol = H2Server(app, config, stream) + else: + protocol = H11Server(app, config, stream) # type: ignore + try: + await protocol.handle_connection() + except WebsocketProtocolRequired as error: + protocol = WebsocketServer( # type: ignore + app, config, stream, upgrade_request=error.request + ) + await protocol.handle_connection() + except H2CProtocolRequired as error: + protocol = H2Server(app, config, stream, upgrade_request=error.request) + await protocol.handle_connection() + except H2ProtocolAssumed as error: + protocol = H2Server(app, config, stream, received_data=error.data) + await protocol.handle_connection() + + +async def worker_serve( + app: Type[ASGIFramework], + config: Config, + *, + sockets: Optional[List[socket]] = None, + shutdown_event: Optional[EventType] = None, + task_status: trio._core._run._TaskStatus = trio.TASK_STATUS_IGNORED, +) -> None: + lifespan = Lifespan(app, config) + reload_ = False + + async with trio.open_nursery() as lifespan_nursery: + await lifespan_nursery.start(lifespan.handle_lifespan) + await lifespan.wait_for_startup() + + try: + async with trio.open_nursery() as nursery: + if config.use_reloader: + nursery.start_soon(observe_changes, trio.sleep) + + if shutdown_event is not None: + nursery.start_soon(check_shutdown, shutdown_event, trio.sleep) + + if sockets is None: + sockets = config.create_sockets() + for sock in sockets: + sock.listen(config.backlog) + listeners = [ + trio.SocketListener(trio.socket.from_stdlib_socket(sock)) for sock in sockets + ] + if config.ssl_enabled: + listeners = [ + trio.ssl.SSLListener( + tcp_listener, config.create_ssl_context(), https_compatible=True + ) + for tcp_listener in listeners + ] + + task_status.started() + await trio.serve_listeners(partial(serve_stream, app, config), listeners) + + except MustReloadException: + reload_ = True + except (Shutdown, KeyboardInterrupt): + pass + finally: + await lifespan.wait_for_shutdown() + lifespan_nursery.cancel_scope.cancel() + + if reload_: + restart() + + +def trio_worker( + config: Config, + sockets: Optional[List[socket]] = None, + shutdown_event: Optional[EventType] = None, +) -> None: + if sockets is not None: + for sock in sockets: + sock.listen(config.backlog) + app = 
load_application(config.application_path) + trio.run(partial(worker_serve, app, config, sockets=sockets, shutdown_event=shutdown_event)) diff --git a/py3/lib/python3.6/site-packages/hypercorn/trio/wsproto.py b/py3/lib/python3.6/site-packages/hypercorn/trio/wsproto.py new file mode 100644 index 0000000..8d01b12 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/trio/wsproto.py @@ -0,0 +1,107 @@ +from typing import Optional, Type + +import h11 +import trio +from wsproto import ConnectionType, WSConnection +from wsproto.connection import ConnectionState +from wsproto.events import CloseConnection, Event, Message, Ping, Request +from wsproto.frame_protocol import CloseReason + +from .base import HTTPServer +from ..asgi.utils import ASGIWebsocketState, FrameTooLarge, WebsocketBuffer +from ..asgi.wsproto import WebsocketMixin +from ..config import Config +from ..typing import ASGIFramework + +MAX_RECV = 2 ** 16 + + +class MustCloseError(Exception): + pass + + +class WebsocketServer(HTTPServer, WebsocketMixin): + def __init__( + self, + app: Type[ASGIFramework], + config: Config, + stream: trio.abc.Stream, + *, + upgrade_request: Optional[h11.Request] = None, + ) -> None: + super().__init__(stream, "wsproto") + self.app = app + self.config = config + self.connection = WSConnection(ConnectionType.SERVER) + self.response: Optional[dict] = None + self.scope: Optional[dict] = None + self.send_lock = trio.Lock() + self.state = ASGIWebsocketState.HANDSHAKE + + self.buffer = WebsocketBuffer(self.config.websocket_max_message_size) + self.app_send_channel, self.app_receive_channel = trio.open_memory_channel(10) + + if upgrade_request is not None: + self.connection.initiate_upgrade_connection( + upgrade_request.headers, upgrade_request.target + ) + + async def handle_connection(self) -> None: + try: + request = await self.read_request() + async with trio.open_nursery() as nursery: + nursery.start_soon(self.read_messages) + await self.handle_websocket(request) + if self.state == ASGIWebsocketState.HTTPCLOSED: + raise MustCloseError() + except (trio.BrokenResourceError, trio.ClosedResourceError): + await self.asgi_put({"type": "websocket.disconnect"}) + except MustCloseError: + pass + finally: + await self.aclose() + + async def read_request(self) -> Request: + for event in self.connection.events(): + if isinstance(event, Request): + return event + + async def read_messages(self) -> None: + while True: + data = await self.stream.receive_some(MAX_RECV) + if data == b"": + data = None # wsproto expects None rather than b"" for EOF + self.connection.receive_data(data) + for event in self.connection.events(): + if isinstance(event, Message): + try: + self.buffer.extend(event) + except FrameTooLarge: + await self.asend(CloseConnection(code=CloseReason.MESSAGE_TOO_BIG)) + await self.asgi_put({"type": "websocket.disconnect"}) + raise MustCloseError() + + if event.message_finished: + await self.asgi_put(self.buffer.to_message()) + self.buffer.clear() + elif isinstance(event, Ping): + await self.asend(event.response()) + elif isinstance(event, CloseConnection): + if self.connection.state == ConnectionState.REMOTE_CLOSING: + await self.asend(event.response()) + await self.asgi_put({"type": "websocket.disconnect"}) + raise MustCloseError() + + async def asend(self, event: Event) -> None: + async with self.send_lock: + await self.stream.send_all(self.connection.send(event)) + + async def asgi_put(self, message: dict) -> None: + await self.app_send_channel.send(message) + + async def asgi_receive(self) -> 
dict: + return await self.app_receive_channel.receive() + + @property + def scheme(self) -> str: + return "wss" if self._is_ssl else "ws" diff --git a/py3/lib/python3.6/site-packages/hypercorn/typing.py b/py3/lib/python3.6/site-packages/hypercorn/typing.py new file mode 100644 index 0000000..41e5531 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/typing.py @@ -0,0 +1,73 @@ +import socket +from multiprocessing.synchronize import Event as EventType +from typing import Callable, List, Optional, Tuple, Union + +import h2.events +import h11 +from typing_extensions import Protocol # Till PEP 544 is accepted + +from .config import Config + +H11SendableEvent = Union[h11.Data, h11.EndOfMessage, h11.InformationalResponse, h11.Response] + +WorkerFunc = Callable[[Config, Optional[List[socket.socket]], Optional[EventType]], None] + + +class ASGIFramework(Protocol): + # Should replace with a Protocol when PEP 544 is accepted. + + def __init__(self, scope: dict) -> None: + ... + + async def __call__(self, receive: Callable, send: Callable) -> None: + ... + + +class H2SyncStream(Protocol): + scope: dict + + def data_received(self, data: bytes) -> None: + ... + + def ended(self) -> None: + ... + + def reset(self) -> None: + ... + + def close(self) -> None: + ... + + async def handle_request( + self, + event: h2.events.RequestReceived, + scheme: str, + client: Tuple[str, int], + server: Tuple[str, int], + ) -> None: + ... + + +class H2AsyncStream(Protocol): + scope: dict + + async def data_received(self, data: bytes) -> None: + ... + + async def ended(self) -> None: + ... + + async def reset(self) -> None: + ... + + async def close(self) -> None: + ... + + async def handle_request( + self, + event: h2.events.RequestReceived, + scheme: str, + client: Tuple[str, int], + server: Tuple[str, int], + ) -> None: + ... 
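The ASGIFramework protocol defined in typing.py above captures the ASGI 2.0 double-callable interface that hypercorn drives: the framework is constructed with the connection scope, and the resulting instance is awaited with receive/send callables. A minimal sketch of an application satisfying that protocol (the EchoApp name and the response body are illustrative, not taken from this diff):

class EchoApp:
    """Minimal ASGI 2.0 application matching the ASGIFramework protocol."""

    def __init__(self, scope: dict) -> None:
        self.scope = scope

    async def __call__(self, receive, send) -> None:
        # Only handle plain HTTP connections in this sketch.
        if self.scope["type"] != "http":
            return
        # Drain the request body (or a disconnect) before responding.
        while True:
            message = await receive()
            if message["type"] != "http.request" or not message.get("more_body", False):
                break
        await send(
            {
                "type": "http.response.start",
                "status": 200,
                "headers": [(b"content-type", b"text/plain"), (b"content-length", b"5")],
            }
        )
        await send({"type": "http.response.body", "body": b"hello", "more_body": False})

An app like this would be referenced as a "module:attribute" string via config.application_path and resolved by load_application in utils.py below.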
diff --git a/py3/lib/python3.6/site-packages/hypercorn/utils.py b/py3/lib/python3.6/site-packages/hypercorn/utils.py new file mode 100644 index 0000000..4fa35f9 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hypercorn/utils.py @@ -0,0 +1,139 @@ +import os +import platform +import socket +import sys +from importlib import import_module +from multiprocessing.synchronize import Event as EventType +from pathlib import Path +from time import time +from types import ModuleType +from typing import Any, Awaitable, Callable, Dict, List, Optional, Tuple, Type +from wsgiref.handlers import format_date_time + +from .typing import ASGIFramework + + +class Shutdown(Exception): + pass + + +class MustReloadException(Exception): + pass + + +class NoAppException(Exception): + pass + + +def suppress_body(method: str, status_code: int) -> bool: + return method == "HEAD" or 100 <= status_code < 200 or status_code in {204, 304, 412} + + +def response_headers(protocol: str) -> List[Tuple[bytes, bytes]]: + return [ + (b"date", format_date_time(time()).encode("ascii")), + (b"server", f"hypercorn-{protocol}".encode("ascii")), + ] + + +def load_application(path: str) -> Type[ASGIFramework]: + try: + module_name, app_name = path.split(":", 1) + except ValueError: + module_name, app_name = path, "app" + except AttributeError: + raise NoAppException() + + module_path = Path(module_name).resolve() + sys.path.insert(0, str(module_path.parent)) + if module_path.is_file(): + import_name = module_path.with_suffix("").name + else: + import_name = module_path.name + try: + module = import_module(import_name) + except ModuleNotFoundError as error: + if error.name == import_name: # type: ignore + raise NoAppException() + else: + raise + + try: + return eval(app_name, vars(module)) + except NameError: + raise NoAppException() + + +async def observe_changes(sleep: Callable[[float], Awaitable[Any]]) -> None: + last_updates: Dict[ModuleType, float] = {} + while True: + for module in list(sys.modules.values()): + filename = getattr(module, "__file__", None) + if filename is None: + continue + try: + mtime = Path(filename).stat().st_mtime + except FileNotFoundError: + continue + else: + if mtime > last_updates.get(module, mtime): + raise MustReloadException() + last_updates[module] = mtime + await sleep(1) + + +def restart() -> None: + # Restart this process (only safe for dev/debug) + executable = sys.executable + script_path = Path(sys.argv[0]).resolve() + args = sys.argv[1:] + main_package = sys.modules["__main__"].__package__ + + if main_package is None: + # Executed by filename + if platform.system() == "Windows": + if not script_path.exists() and script_path.with_suffix(".exe").exists(): + # quart run + executable = str(script_path.with_suffix(".exe")) + else: + # python run.py + args.append(str(script_path)) + else: + if script_path.is_file() and os.access(script_path, os.X_OK): + # hypercorn run:app --reload + executable = str(script_path) + else: + # python run.py + args.append(str(script_path)) + else: + # Executed as a module e.g. 
python -m run + module = script_path.stem + import_name = main_package + if module != "__main__": + import_name = f"{main_package}.{module}" + args[:0] = ["-m", import_name.lstrip(".")] + + os.execv(executable, [executable] + args) + + +async def check_shutdown( + shutdown_event: EventType, sleep: Callable[[float], Awaitable[Any]] +) -> None: + while True: + if shutdown_event.is_set(): + raise Shutdown() + await sleep(0.1) + + +def write_pid_file(pid_path: str) -> None: + with open(pid_path, "w") as file_: + file_.write(f"{os.getpid()}") + + +def parse_socket_addr(family: int, address: tuple) -> Optional[Tuple[str, int]]: + if family == socket.AF_INET: + return address # type: ignore + elif family == socket.AF_INET6: + return (address[0], address[1]) + else: + return None diff --git a/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/LICENSE b/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/LICENSE new file mode 100644 index 0000000..d24c351 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Cory Benfield + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/METADATA b/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/METADATA new file mode 100644 index 0000000..51f50e7 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/METADATA @@ -0,0 +1,244 @@ +Metadata-Version: 2.1 +Name: hyperframe +Version: 5.2.0 +Summary: HTTP/2 framing layer for Python +Home-page: https://python-hyper.org/hyperframe/en/latest/ +Author: Cory Benfield +Author-email: cory@lukasa.co.uk +License: MIT License +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython + +====================================== +hyperframe: Pure-Python HTTP/2 framing +====================================== + +.. image:: https://travis-ci.org/python-hyper/hyperframe.png?branch=master + :target: https://travis-ci.org/python-hyper/hyperframe + +This library contains the HTTP/2 framing code used in the `hyper`_ project. It +provides a pure-Python codebase that is capable of decoding a binary stream +into HTTP/2 frames. + +This library is used directly by `hyper`_ and a number of other projects to +provide HTTP/2 frame decoding logic. + +Contributing +============ + +hyperframe welcomes contributions from anyone! Unlike many other projects we +are happy to accept cosmetic contributions and small contributions, in addition +to large feature requests and changes. + +Before you contribute (either by opening an issue or filing a pull request), +please `read the contribution guidelines`_. + +.. _read the contribution guidelines: http://hyper.readthedocs.org/en/development/contributing.html + +License +======= + +hyperframe is made available under the MIT License. For more details, see the +``LICENSE`` file in the repository. + +Authors +======= + +hyperframe is maintained by Cory Benfield, with contributions from others. For +more details about the contributors, please see ``CONTRIBUTORS.rst``. + +.. _hyper: http://python-hyper.org/ + + +Release History +=============== + +6.0.0dev0 +--------- + +5.2.0 (2019-01-18) +------------------ + +**API Changes (Backward-compatible)** + +- Add a new ENABLE_CONNECT_PROTOCOL settings paramter. + +**Other Changes** + +- Fix collections.abc deprecation. +- Drop support for Python 3.3 and support 3.7. + +5.1.0 (2017-04-24) +------------------ + +**API Changes (Backward-compatible)** + +- Added support for ``DataFrame.data`` being a ``memoryview`` object. + +5.0.0 (2017-03-07) +------------------ + +**Backwards Incompatible API Changes** + +- Added support for unknown extension frames. These will be returned in the new + ``ExtensionFrame`` object. The flag information for these frames is persisted + in ``flag_byte`` if needed. + +4.0.2 (2017-02-20) +------------------ + +**Bugfixes** + +- Fixed AltSvc stream association, which was incorrectly set to ``'both'``: + should have been ``'either'``. +- Fixed a bug where stream IDs on received frames were allowed to be 32-bit, + instead of 31-bit. 
+- Fixed a bug with frames that had the ``PADDING`` flag set but zero-length + padding, whose flow-controlled length was calculated wrongly. +- Miscellaneous performance improvements to serialization and parsing logic. + +4.0.1 (2016-03-13) +------------------ + +**Bugfixes** + +- Fixed bug with the repr of ``AltSvcFrame``, where building it could throw + exceptions if the frame had been received from the network. + +4.0.0 (2016-03-13) +------------------ + +**Backwards Incompatible API Changes** + +- Updated old ALTSVC frame definition to match the newly specified RFC 7838. +- Remove BLOCKED frame, which was never actually specified. +- Removed previously deprecated ``SettingsFrame.SETTINGS_MAX_FRAME_SIZE`` and + ``SettingsFrame.SETTINGS_MAX_HEADER_LIST_SIZE``. + +3.2.0 (2016-02-02) +------------------ + +**API Changes (Backward-compatible)** + +- Invalid PING frame bodies now raise ``InvalidFrameError``, not + ``ValueError``. Note that ``InvalidFrameError`` is a ``ValueError`` subclass. +- Invalid RST_STREAM frame bodies now raise ``InvalidFramError``, not + ``ValueError``. Note that ``InvalidFrameError`` is a ``ValueError`` subclass. +- Canonicalized the names of ``SettingsFrame.SETTINGS_MAX_FRAME_SIZE`` and + ``SettingsFrame.SETTINGS_MAX_HEADER_LIST_SIZE`` to match their peers, by + adding new properties ``SettingsFrame.MAX_FRAME_SIZE`` and + ``SettingsFrame.SETTINGS_MAX_HEADER_LIST_SIZE``. The old names are still + present, but will be deprecated in 4.0.0. + +**Bugfixes** + +- The change in ``3.1.0`` that ensured that ``InvalidFrameError`` would be + thrown did not affect certain invalid values in ALT_SVC frames. This has been + fixed: ``ValueError`` will no longer be thrown from invalid ALT_SVC bodies. + +3.1.1 (2016-01-18) +------------------ + +**Bugfixes** + +- Correctly error when receiving Ping frames that have insufficient data. + +3.1.0 (2016-01-13) +------------------ + +**API Changes** + +- Added new ``InvalidFrameError`` that is thrown instead of ``struct.error`` + when parsing a frame. + +**Bugfixes** + +- Fixed error when trying to serialize frames that use Priority information + with the defaults for that information. +- Fixed errors when displaying the repr of frames with non-printable bodies. + +3.0.1 (2016-01-08) +------------------ + +**Bugfixes** + +- Fix issue where unpadded DATA, PUSH_PROMISE and HEADERS frames that had empty + bodies would raise ``InvalidPaddingError`` exceptions when parsed. + +3.0.0 (2016-01-08) +------------------ + +**Backwards Incompatible API Changes** + +- Parsing padded frames that have invalid padding sizes now throws an + ``InvalidPaddingError``. + +2.2.0 (2015-10-15) +------------------ + +**API Changes** + +- When an unknown frame is encountered, ``parse_frame_header`` now throws a + ``ValueError`` subclass: ``UnknownFrameError``. This subclass contains the + frame type and the length of the frame body. + +2.1.0 (2015-10-06) +------------------ + +**API Changes** + +- Frames parsed from binary data now carry a ``body_len`` attribute that + matches the frame length (minus the frame header). + +2.0.0 (2015-09-21) +------------------ + +**API Changes** + +- Attempting to parse unrecognised frames now throws ``ValueError`` instead of + ``KeyError``. Thanks to @Kriechi! +- Flags are now validated for correctness, preventing setting flags that + ``hyperframe`` does not recognise and that would not serialize. Thanks to + @mhils! +- Frame properties can now be initialized in the constructors. Thanks to @mhils + and @Kriechi! 
+- Frames that cannot be sent on a stream now have their stream ID defaulted + to ``0``. Thanks to @Kriechi! + +**Other Changes** + +- Frames have a more useful repr. Thanks to @mhils! + +1.1.1 (2015-07-20) +------------------ + +- Fix a bug where ``FRAME_MAX_LEN`` was one byte too small. + +1.1.0 (2015-06-28) +------------------ + +- Add ``body_len`` property to frames to enable introspection of the actual + frame length. Thanks to @jdecuyper! + +1.0.1 (2015-06-27) +------------------ + +- Fix bug where the frame header would have an incorrect length added to it. + +1.0.0 (2015-04-12) +------------------ + +- Initial extraction from hyper. + + diff --git a/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/RECORD b/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/RECORD new file mode 100644 index 0000000..7245115 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/RECORD @@ -0,0 +1,14 @@ +hyperframe/__init__.py,sha256=9ql6Hb25Q3PrN4r7JdmX8UMzN3cFcDLQe4_qYf8iWJc,136 +hyperframe/exceptions.py,sha256=cFLMN1IjXSPyhpWGOtfhPrVM5qu7kprOQJHSaWSOd88,936 +hyperframe/flags.py,sha256=O_Mvcvzcc-HR_X-dPMb_Z7GMttowb9G9lDyim8R4I7s,1286 +hyperframe/frame.py,sha256=UclwgZMXUnX9IBwKJWvUJcG1JrDnKbBtcXnx4v4uqkw,26658 +hyperframe-5.2.0.dist-info/LICENSE,sha256=djqTQqBN9iBGydx0ilKHk06wpTMcaGOzygruIOGMtO0,1080 +hyperframe-5.2.0.dist-info/METADATA,sha256=AX9DhnXJ__T6f3nKijVXUIQoRZRcoRCX-OuVjdE0p5k,7232 +hyperframe-5.2.0.dist-info/WHEEL,sha256=_wJFdOYk7i3xxT8ElOkUJvOdOvfNGbR9g-bf6UQT6sU,110 +hyperframe-5.2.0.dist-info/top_level.txt,sha256=aIXWNxzKF_jwE8lyWG5Paqn5RP7PDBYeguraia-oHJE,11 +hyperframe-5.2.0.dist-info/RECORD,, +hyperframe-5.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +hyperframe/__pycache__/flags.cpython-36.pyc,, +hyperframe/__pycache__/frame.cpython-36.pyc,, +hyperframe/__pycache__/__init__.cpython-36.pyc,, +hyperframe/__pycache__/exceptions.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/WHEEL b/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/WHEEL new file mode 100644 index 0000000..c4bde30 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.3) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/top_level.txt new file mode 100644 index 0000000..b21bb7c --- /dev/null +++ b/py3/lib/python3.6/site-packages/hyperframe-5.2.0.dist-info/top_level.txt @@ -0,0 +1 @@ +hyperframe diff --git a/py3/lib/python3.6/site-packages/hyperframe/__init__.py b/py3/lib/python3.6/site-packages/hyperframe/__init__.py new file mode 100644 index 0000000..7620b4b --- /dev/null +++ b/py3/lib/python3.6/site-packages/hyperframe/__init__.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +""" +hyperframe +~~~~~~~~~~ + +A module for providing a pure-Python HTTP/2 framing layer. +""" +__version__ = '5.2.0' diff --git a/py3/lib/python3.6/site-packages/hyperframe/exceptions.py b/py3/lib/python3.6/site-packages/hyperframe/exceptions.py new file mode 100644 index 0000000..dd30369 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hyperframe/exceptions.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +""" +hyperframe/exceptions +~~~~~~~~~~~~~~~~~~~~~ + +Defines the exceptions that can be thrown by hyperframe. 
+""" + + +class UnknownFrameError(ValueError): + """ + An frame of unknown type was received. + """ + def __init__(self, frame_type, length): + #: The type byte of the unknown frame that was received. + self.frame_type = frame_type + + #: The length of the data portion of the unknown frame. + self.length = length + + def __str__(self): + return ( + "UnknownFrameError: Unknown frame type 0x%X received, " + "length %d bytes" % (self.frame_type, self.length) + ) + + +class InvalidPaddingError(ValueError): + """ + A frame with invalid padding was received. + """ + pass + + +class InvalidFrameError(ValueError): + """ + Parsing a frame failed because the data was not laid out appropriately. + + .. versionadded:: 3.0.2 + """ + pass diff --git a/py3/lib/python3.6/site-packages/hyperframe/flags.py b/py3/lib/python3.6/site-packages/hyperframe/flags.py new file mode 100644 index 0000000..1660bd1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hyperframe/flags.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +""" +hyperframe/flags +~~~~~~~~~~~~~~~~ + +Defines basic Flag and Flags data structures. +""" +import collections + +try: + from collections.abc import MutableSet +except ImportError: # pragma: no cover + # Python 2.7 compatibility + from collections import MutableSet + +Flag = collections.namedtuple("Flag", ["name", "bit"]) + + +class Flags(MutableSet): + """ + A simple MutableSet implementation that will only accept known flags as + elements. + + Will behave like a regular set(), except that a ValueError will be thrown + when .add()ing unexpected flags. + """ + def __init__(self, defined_flags): + self._valid_flags = set(flag.name for flag in defined_flags) + self._flags = set() + + def __contains__(self, x): + return self._flags.__contains__(x) + + def __iter__(self): + return self._flags.__iter__() + + def __len__(self): + return self._flags.__len__() + + def discard(self, value): + return self._flags.discard(value) + + def add(self, value): + if value not in self._valid_flags: + raise ValueError( + "Unexpected flag: {}. Valid flags are: {}".format( + value, self._valid_flags + ) + ) + return self._flags.add(value) diff --git a/py3/lib/python3.6/site-packages/hyperframe/frame.py b/py3/lib/python3.6/site-packages/hyperframe/frame.py new file mode 100644 index 0000000..7950572 --- /dev/null +++ b/py3/lib/python3.6/site-packages/hyperframe/frame.py @@ -0,0 +1,822 @@ +# -*- coding: utf-8 -*- +""" +hyperframe/frame +~~~~~~~~~~~~~~~~ + +Defines framing logic for HTTP/2. Provides both classes to represent framed +data and logic for aiding the connection when it comes to reading from the +socket. +""" +import struct +import binascii + +from .exceptions import ( + UnknownFrameError, InvalidPaddingError, InvalidFrameError +) +from .flags import Flag, Flags + + +# The maximum initial length of a frame. Some frames have shorter maximum +# lengths. +FRAME_MAX_LEN = (2 ** 14) + +# The maximum allowed length of a frame. +FRAME_MAX_ALLOWED_LEN = (2 ** 24) - 1 + +# Stream association enumerations. +_STREAM_ASSOC_HAS_STREAM = "has-stream" +_STREAM_ASSOC_NO_STREAM = "no-stream" +_STREAM_ASSOC_EITHER = "either" + +# Structs for packing and unpacking +_STRUCT_HBBBL = struct.Struct(">HBBBL") +_STRUCT_LL = struct.Struct(">LL") +_STRUCT_HL = struct.Struct(">HL") +_STRUCT_LB = struct.Struct(">LB") +_STRUCT_L = struct.Struct(">L") +_STRUCT_H = struct.Struct(">H") +_STRUCT_B = struct.Struct(">B") + + +class Frame(object): + """ + The base class for all HTTP/2 frames. + """ + #: The flags defined on this type of frame. 
+ defined_flags = [] + + #: The byte used to define the type of the frame. + type = None + + # If 'has-stream', the frame's stream_id must be non-zero. If 'no-stream', + # it must be zero. If 'either', it's not checked. + stream_association = None + + def __init__(self, stream_id, flags=()): + #: The stream identifier for the stream this frame was received on. + #: Set to 0 for frames sent on the connection (stream-id 0). + self.stream_id = stream_id + + #: The flags set for this frame. + self.flags = Flags(self.defined_flags) + + #: The frame length, excluding the nine-byte header. + self.body_len = 0 + + for flag in flags: + self.flags.add(flag) + + if (not self.stream_id and + self.stream_association == _STREAM_ASSOC_HAS_STREAM): + raise ValueError('Stream ID must be non-zero') + if (self.stream_id and + self.stream_association == _STREAM_ASSOC_NO_STREAM): + raise ValueError('Stream ID must be zero') + + def __repr__(self): + flags = ", ".join(self.flags) or "None" + body = binascii.hexlify(self.serialize_body()).decode('ascii') + if len(body) > 20: + body = body[:20] + "..." + return ( + "{type}(Stream: {stream}; Flags: {flags}): {body}" + ).format( + type=type(self).__name__, + stream=self.stream_id, + flags=flags, + body=body + ) + + @staticmethod + def parse_frame_header(header, strict=False): + """ + Takes a 9-byte frame header and returns a tuple of the appropriate + Frame object and the length that needs to be read from the socket. + + This populates the flags field, and determines how long the body is. + + :param strict: Whether to raise an exception when encountering a frame + not defined by spec and implemented by hyperframe. + + :raises hyperframe.exceptions.UnknownFrameError: If a frame of unknown + type is received. + + .. versionchanged:: 5.0.0 + Added :param:`strict` to accommodate :class:`ExtensionFrame` + """ + try: + fields = _STRUCT_HBBBL.unpack(header) + except struct.error: + raise InvalidFrameError("Invalid frame header") + + # First 24 bits are frame length. + length = (fields[0] << 8) + fields[1] + type = fields[2] + flags = fields[3] + stream_id = fields[4] & 0x7FFFFFFF + + try: + frame = FRAMES[type](stream_id) + except KeyError: + if strict: + raise UnknownFrameError(type, length) + frame = ExtensionFrame(type=type, stream_id=stream_id) + + frame.parse_flags(flags) + return (frame, length) + + def parse_flags(self, flag_byte): + for flag, flag_bit in self.defined_flags: + if flag_byte & flag_bit: + self.flags.add(flag) + + return self.flags + + def serialize(self): + """ + Convert a frame into a bytestring, representing the serialized form of + the frame. + """ + body = self.serialize_body() + self.body_len = len(body) + + # Build the common frame header. + # First, get the flags. + flags = 0 + + for flag, flag_bit in self.defined_flags: + if flag in self.flags: + flags |= flag_bit + + header = _STRUCT_HBBBL.pack( + (self.body_len >> 8) & 0xFFFF, # Length spread over top 24 bits + self.body_len & 0xFF, + self.type, + flags, + self.stream_id & 0x7FFFFFFF # Stream ID is 32 bits. + ) + + return header + body + + def serialize_body(self): + raise NotImplementedError() + + def parse_body(self, data): + """ + Given the body of a frame, parses it into frame data. This populates + the non-header parts of the frame: that is, it does not populate the + stream ID or flags. + + :param data: A memoryview object containing the body data of the frame. + Must not contain *more* data than the length returned by + :meth:`parse_frame_header + `. 
+ """ + raise NotImplementedError() + + +class Padding(object): + """ + Mixin for frames that contain padding. Defines extra fields that can be + used and set by frames that can be padded. + """ + def __init__(self, stream_id, pad_length=0, **kwargs): + super(Padding, self).__init__(stream_id, **kwargs) + + #: The length of the padding to use. + self.pad_length = pad_length + + def serialize_padding_data(self): + if 'PADDED' in self.flags: + return _STRUCT_B.pack(self.pad_length) + return b'' + + def parse_padding_data(self, data): + if 'PADDED' in self.flags: + try: + self.pad_length = struct.unpack('!B', data[:1])[0] + except struct.error: + raise InvalidFrameError("Invalid Padding data") + return 1 + return 0 + + @property + def total_padding(self): + return self.pad_length + + +class Priority(object): + """ + Mixin for frames that contain priority data. Defines extra fields that can + be used and set by frames that contain priority data. + """ + def __init__(self, + stream_id, + depends_on=0x0, + stream_weight=0x0, + exclusive=False, + **kwargs): + super(Priority, self).__init__(stream_id, **kwargs) + + #: The stream ID of the stream on which this stream depends. + self.depends_on = depends_on + + #: The weight of the stream. This is an integer between 0 and 256. + self.stream_weight = stream_weight + + #: Whether the exclusive bit was set. + self.exclusive = exclusive + + def serialize_priority_data(self): + return _STRUCT_LB.pack( + self.depends_on + (0x80000000 if self.exclusive else 0), + self.stream_weight + ) + + def parse_priority_data(self, data): + try: + self.depends_on, self.stream_weight = _STRUCT_LB.unpack(data[:5]) + except struct.error: + raise InvalidFrameError("Invalid Priority data") + + self.exclusive = True if self.depends_on >> 31 else False + self.depends_on &= 0x7FFFFFFF + return 5 + + +class DataFrame(Padding, Frame): + """ + DATA frames convey arbitrary, variable-length sequences of octets + associated with a stream. One or more DATA frames are used, for instance, + to carry HTTP request or response payloads. + """ + #: The flags defined for DATA frames. + defined_flags = [ + Flag('END_STREAM', 0x01), + Flag('PADDED', 0x08), + ] + + #: The type byte for data frames. + type = 0x0 + + stream_association = _STREAM_ASSOC_HAS_STREAM + + def __init__(self, stream_id, data=b'', **kwargs): + super(DataFrame, self).__init__(stream_id, **kwargs) + + #: The data contained on this frame. + self.data = data + + def serialize_body(self): + padding_data = self.serialize_padding_data() + padding = b'\0' * self.total_padding + if isinstance(self.data, memoryview): + self.data = self.data.tobytes() + return b''.join([padding_data, self.data, padding]) + + def parse_body(self, data): + padding_data_length = self.parse_padding_data(data) + self.data = ( + data[padding_data_length:len(data)-self.total_padding].tobytes() + ) + self.body_len = len(data) + + if self.total_padding and self.total_padding >= self.body_len: + raise InvalidPaddingError("Padding is too long.") + + @property + def flow_controlled_length(self): + """ + The length of the frame that needs to be accounted for when considering + flow control. + """ + padding_len = 0 + if 'PADDED' in self.flags: + # Account for extra 1-byte padding length field, which is still + # present if possibly zero-valued. + padding_len = self.total_padding + 1 + return len(self.data) + padding_len + + +class PriorityFrame(Priority, Frame): + """ + The PRIORITY frame specifies the sender-advised priority of a stream. 
It + can be sent at any time for an existing stream. This enables + reprioritisation of existing streams. + """ + #: The flags defined for PRIORITY frames. + defined_flags = [] + + #: The type byte defined for PRIORITY frames. + type = 0x02 + + stream_association = _STREAM_ASSOC_HAS_STREAM + + def serialize_body(self): + return self.serialize_priority_data() + + def parse_body(self, data): + self.parse_priority_data(data) + self.body_len = len(data) + + +class RstStreamFrame(Frame): + """ + The RST_STREAM frame allows for abnormal termination of a stream. When sent + by the initiator of a stream, it indicates that they wish to cancel the + stream or that an error condition has occurred. When sent by the receiver + of a stream, it indicates that either the receiver is rejecting the stream, + requesting that the stream be cancelled or that an error condition has + occurred. + """ + #: The flags defined for RST_STREAM frames. + defined_flags = [] + + #: The type byte defined for RST_STREAM frames. + type = 0x03 + + stream_association = _STREAM_ASSOC_HAS_STREAM + + def __init__(self, stream_id, error_code=0, **kwargs): + super(RstStreamFrame, self).__init__(stream_id, **kwargs) + + #: The error code used when resetting the stream. + self.error_code = error_code + + def serialize_body(self): + return _STRUCT_L.pack(self.error_code) + + def parse_body(self, data): + if len(data) != 4: + raise InvalidFrameError( + "RST_STREAM must have 4 byte body: actual length %s." % + len(data) + ) + + try: + self.error_code = _STRUCT_L.unpack(data)[0] + except struct.error: # pragma: no cover + raise InvalidFrameError("Invalid RST_STREAM body") + + self.body_len = 4 + + +class SettingsFrame(Frame): + """ + The SETTINGS frame conveys configuration parameters that affect how + endpoints communicate. The parameters are either constraints on peer + behavior or preferences. + + Settings are not negotiated. Settings describe characteristics of the + sending peer, which are used by the receiving peer. Different values for + the same setting can be advertised by each peer. For example, a client + might set a high initial flow control window, whereas a server might set a + lower value to conserve resources. + """ + #: The flags defined for SETTINGS frames. + defined_flags = [Flag('ACK', 0x01)] + + #: The type byte defined for SETTINGS frames. + type = 0x04 + + stream_association = _STREAM_ASSOC_NO_STREAM + + # We need to define the known settings, they may as well be class + # attributes. + #: The byte that signals the SETTINGS_HEADER_TABLE_SIZE setting. + HEADER_TABLE_SIZE = 0x01 + #: The byte that signals the SETTINGS_ENABLE_PUSH setting. + ENABLE_PUSH = 0x02 + #: The byte that signals the SETTINGS_MAX_CONCURRENT_STREAMS setting. + MAX_CONCURRENT_STREAMS = 0x03 + #: The byte that signals the SETTINGS_INITIAL_WINDOW_SIZE setting. + INITIAL_WINDOW_SIZE = 0x04 + #: The byte that signals the SETTINGS_MAX_FRAME_SIZE setting. + MAX_FRAME_SIZE = 0x05 + #: The byte that signals the SETTINGS_MAX_HEADER_LIST_SIZE setting. + MAX_HEADER_LIST_SIZE = 0x06 + #: The byte that signals SETTINGS_ENABLE_CONNECT_PROTOCOL setting. + ENABLE_CONNECT_PROTOCOL = 0x08 + + def __init__(self, stream_id=0, settings=None, **kwargs): + super(SettingsFrame, self).__init__(stream_id, **kwargs) + + if settings and "ACK" in kwargs.get("flags", ()): + raise ValueError("Settings must be empty if ACK flag is set.") + + #: A dictionary of the setting type byte to the value of the setting. 
+ self.settings = settings or {} + + def serialize_body(self): + return b''.join([_STRUCT_HL.pack(setting & 0xFF, value) + for setting, value in self.settings.items()]) + + def parse_body(self, data): + body_len = 0 + for i in range(0, len(data), 6): + try: + name, value = _STRUCT_HL.unpack(data[i:i+6]) + except struct.error: + raise InvalidFrameError("Invalid SETTINGS body") + + self.settings[name] = value + body_len += 6 + + self.body_len = body_len + + +class PushPromiseFrame(Padding, Frame): + """ + The PUSH_PROMISE frame is used to notify the peer endpoint in advance of + streams the sender intends to initiate. + """ + #: The flags defined for PUSH_PROMISE frames. + defined_flags = [ + Flag('END_HEADERS', 0x04), + Flag('PADDED', 0x08) + ] + + #: The type byte defined for PUSH_PROMISE frames. + type = 0x05 + + stream_association = _STREAM_ASSOC_HAS_STREAM + + def __init__(self, stream_id, promised_stream_id=0, data=b'', **kwargs): + super(PushPromiseFrame, self).__init__(stream_id, **kwargs) + + #: The stream ID that is promised by this frame. + self.promised_stream_id = promised_stream_id + + #: The HPACK-encoded header block for the simulated request on the new + #: stream. + self.data = data + + def serialize_body(self): + padding_data = self.serialize_padding_data() + padding = b'\0' * self.total_padding + data = _STRUCT_L.pack(self.promised_stream_id) + return b''.join([padding_data, data, self.data, padding]) + + def parse_body(self, data): + padding_data_length = self.parse_padding_data(data) + + try: + self.promised_stream_id = _STRUCT_L.unpack( + data[padding_data_length:padding_data_length + 4] + )[0] + except struct.error: + raise InvalidFrameError("Invalid PUSH_PROMISE body") + + self.data = data[padding_data_length + 4:].tobytes() + self.body_len = len(data) + + if self.total_padding and self.total_padding >= self.body_len: + raise InvalidPaddingError("Padding is too long.") + + +class PingFrame(Frame): + """ + The PING frame is a mechanism for measuring a minimal round-trip time from + the sender, as well as determining whether an idle connection is still + functional. PING frames can be sent from any endpoint. + """ + #: The flags defined for PING frames. + defined_flags = [Flag('ACK', 0x01)] + + #: The type byte defined for PING frames. + type = 0x06 + + stream_association = _STREAM_ASSOC_NO_STREAM + + def __init__(self, stream_id=0, opaque_data=b'', **kwargs): + super(PingFrame, self).__init__(stream_id, **kwargs) + + #: The opaque data sent in this PING frame, as a bytestring. + self.opaque_data = opaque_data + + def serialize_body(self): + if len(self.opaque_data) > 8: + raise InvalidFrameError( + "PING frame may not have more than 8 bytes of data, got %s" % + self.opaque_data + ) + + data = self.opaque_data + data += b'\x00' * (8 - len(self.opaque_data)) + return data + + def parse_body(self, data): + if len(data) != 8: + raise InvalidFrameError( + "PING frame must have 8 byte length: got %s" % len(data) + ) + + self.opaque_data = data.tobytes() + self.body_len = 8 + + +class GoAwayFrame(Frame): + """ + The GOAWAY frame informs the remote peer to stop creating streams on this + connection. It can be sent from the client or the server. Once sent, the + sender will ignore frames sent on new streams for the remainder of the + connection. + """ + #: The flags defined for GOAWAY frames. + defined_flags = [] + + #: The type byte defined for GOAWAY frames. 
+ type = 0x07 + + stream_association = _STREAM_ASSOC_NO_STREAM + + def __init__(self, + stream_id=0, + last_stream_id=0, + error_code=0, + additional_data=b'', + **kwargs): + super(GoAwayFrame, self).__init__(stream_id, **kwargs) + + #: The last stream ID definitely seen by the remote peer. + self.last_stream_id = last_stream_id + + #: The error code for connection teardown. + self.error_code = error_code + + #: Any additional data sent in the GOAWAY. + self.additional_data = additional_data + + def serialize_body(self): + data = _STRUCT_LL.pack( + self.last_stream_id & 0x7FFFFFFF, + self.error_code + ) + data += self.additional_data + + return data + + def parse_body(self, data): + try: + self.last_stream_id, self.error_code = _STRUCT_LL.unpack( + data[:8] + ) + except struct.error: + raise InvalidFrameError("Invalid GOAWAY body.") + + self.body_len = len(data) + + if len(data) > 8: + self.additional_data = data[8:].tobytes() + + +class WindowUpdateFrame(Frame): + """ + The WINDOW_UPDATE frame is used to implement flow control. + + Flow control operates at two levels: on each individual stream and on the + entire connection. + + Both types of flow control are hop by hop; that is, only between the two + endpoints. Intermediaries do not forward WINDOW_UPDATE frames between + dependent connections. However, throttling of data transfer by any receiver + can indirectly cause the propagation of flow control information toward the + original sender. + """ + #: The flags defined for WINDOW_UPDATE frames. + defined_flags = [] + + #: The type byte defined for WINDOW_UPDATE frames. + type = 0x08 + + stream_association = _STREAM_ASSOC_EITHER + + def __init__(self, stream_id, window_increment=0, **kwargs): + super(WindowUpdateFrame, self).__init__(stream_id, **kwargs) + + #: The amount the flow control window is to be incremented. + self.window_increment = window_increment + + def serialize_body(self): + return _STRUCT_L.pack(self.window_increment & 0x7FFFFFFF) + + def parse_body(self, data): + try: + self.window_increment = _STRUCT_L.unpack(data)[0] + except struct.error: + raise InvalidFrameError("Invalid WINDOW_UPDATE body") + + self.body_len = 4 + + +class HeadersFrame(Padding, Priority, Frame): + """ + The HEADERS frame carries name-value pairs. It is used to open a stream. + HEADERS frames can be sent on a stream in the "open" or "half closed + (remote)" states. + + The HeadersFrame class is actually basically a data frame in this + implementation, because of the requirement to control the sizes of frames. + A header block fragment that doesn't fit in an entire HEADERS frame needs + to be followed with CONTINUATION frames. From the perspective of the frame + building code the header block is an opaque data segment. + """ + #: The flags defined for HEADERS frames. + defined_flags = [ + Flag('END_STREAM', 0x01), + Flag('END_HEADERS', 0x04), + Flag('PADDED', 0x08), + Flag('PRIORITY', 0x20), + ] + + #: The type byte defined for HEADERS frames. + type = 0x01 + + stream_association = _STREAM_ASSOC_HAS_STREAM + + def __init__(self, stream_id, data=b'', **kwargs): + super(HeadersFrame, self).__init__(stream_id, **kwargs) + + #: The HPACK-encoded header block. 
+ self.data = data + + def serialize_body(self): + padding_data = self.serialize_padding_data() + padding = b'\0' * self.total_padding + + if 'PRIORITY' in self.flags: + priority_data = self.serialize_priority_data() + else: + priority_data = b'' + + return b''.join([padding_data, priority_data, self.data, padding]) + + def parse_body(self, data): + padding_data_length = self.parse_padding_data(data) + data = data[padding_data_length:] + + if 'PRIORITY' in self.flags: + priority_data_length = self.parse_priority_data(data) + else: + priority_data_length = 0 + + self.body_len = len(data) + self.data = ( + data[priority_data_length:len(data)-self.total_padding].tobytes() + ) + + if self.total_padding and self.total_padding >= self.body_len: + raise InvalidPaddingError("Padding is too long.") + + +class ContinuationFrame(Frame): + """ + The CONTINUATION frame is used to continue a sequence of header block + fragments. Any number of CONTINUATION frames can be sent on an existing + stream, as long as the preceding frame on the same stream is one of + HEADERS, PUSH_PROMISE or CONTINUATION without the END_HEADERS flag set. + + Much like the HEADERS frame, hyper treats this as an opaque data frame with + different flags and a different type. + """ + #: The flags defined for CONTINUATION frames. + defined_flags = [Flag('END_HEADERS', 0x04)] + + #: The type byte defined for CONTINUATION frames. + type = 0x09 + + stream_association = _STREAM_ASSOC_HAS_STREAM + + def __init__(self, stream_id, data=b'', **kwargs): + super(ContinuationFrame, self).__init__(stream_id, **kwargs) + + #: The HPACK-encoded header block. + self.data = data + + def serialize_body(self): + return self.data + + def parse_body(self, data): + self.data = data.tobytes() + self.body_len = len(data) + + +class AltSvcFrame(Frame): + """ + The ALTSVC frame is used to advertise alternate services that the current + host, or a different one, can understand. This frame is standardised as + part of RFC 7838. + + This frame does no work to validate that the ALTSVC field parameter is + acceptable per the rules of RFC 7838. + + .. note:: If the ``stream_id`` of this frame is nonzero, the origin field + must have zero length. Conversely, if the ``stream_id`` of this + frame is zero, the origin field must have nonzero length. Put + another way, a valid ALTSVC frame has ``stream_id != 0`` XOR + ``len(origin) != 0``. + """ + type = 0xA + + stream_association = _STREAM_ASSOC_EITHER + + def __init__(self, stream_id, origin=b'', field=b'', **kwargs): + super(AltSvcFrame, self).__init__(stream_id, **kwargs) + + if not isinstance(origin, bytes): + raise ValueError("AltSvc origin must be bytestring.") + if not isinstance(field, bytes): + raise ValueError("AltSvc field must be a bytestring.") + self.origin = origin + self.field = field + + def serialize_body(self): + origin_len = _STRUCT_H.pack(len(self.origin)) + return b''.join([origin_len, self.origin, self.field]) + + def parse_body(self, data): + try: + origin_len = _STRUCT_H.unpack(data[0:2])[0] + self.origin = data[2:2+origin_len].tobytes() + + if len(self.origin) != origin_len: + raise InvalidFrameError("Invalid ALTSVC frame body.") + + self.field = data[2+origin_len:].tobytes() + except (struct.error, ValueError): + raise InvalidFrameError("Invalid ALTSVC frame body.") + + self.body_len = len(data) + + +class ExtensionFrame(Frame): + """ + ExtensionFrame is used to wrap frames which are not natively interpretable + by hyperframe. 
+ + Although certain byte prefixes are ordained by specification to have + certain contextual meanings, frames with other prefixes are not prohibited, + and may be used to communicate arbitrary meaning between HTTP/2 peers. + + Thus, hyperframe, rather than raising an exception when such a frame is + encountered, wraps it in a generic frame to be properly acted upon by + upstream consumers which might have additional context on how to use it. + + .. versionadded:: 5.0.0 + """ + + stream_association = _STREAM_ASSOC_EITHER + + def __init__(self, type, stream_id, **kwargs): + super(ExtensionFrame, self).__init__(stream_id, **kwargs) + self.type = type + self.flag_byte = None + + def parse_flags(self, flag_byte): + """ + For extension frames, we parse the flags by just storing a flag byte. + """ + self.flag_byte = flag_byte + + def parse_body(self, data): + self.body = data.tobytes() + self.body_len = len(data) + + def serialize(self): + """ + A broad override of the serialize method that ensures that the data + comes back out exactly as it came in. This should not be used in most + user code: it exists only as a helper method if frames need to be + reconstituted. + """ + # Build the frame header. + # First, get the flags. + flags = self.flag_byte + + header = _STRUCT_HBBBL.pack( + (self.body_len >> 8) & 0xFFFF, # Length spread over top 24 bits + self.body_len & 0xFF, + self.type, + flags, + self.stream_id & 0x7FFFFFFF # Stream ID is 32 bits. + ) + + return header + self.body + + +_FRAME_CLASSES = [ + DataFrame, + HeadersFrame, + PriorityFrame, + RstStreamFrame, + SettingsFrame, + PushPromiseFrame, + PingFrame, + GoAwayFrame, + WindowUpdateFrame, + ContinuationFrame, + AltSvcFrame, +] +#: FRAMES maps the type byte for each frame to the class used to represent that +#: frame. +FRAMES = {cls.type: cls for cls in _FRAME_CLASSES} diff --git a/py3/lib/python3.6/site-packages/idna-2.8.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/idna-2.8.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna-2.8.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/idna-2.8.dist-info/LICENSE.rst b/py3/lib/python3.6/site-packages/idna-2.8.dist-info/LICENSE.rst new file mode 100644 index 0000000..3ee64fb --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna-2.8.dist-info/LICENSE.rst @@ -0,0 +1,80 @@ +License +------- + +Copyright (c) 2013-2018, Kim Davies. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +#. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +#. Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided with + the distribution. + +#. Neither the name of the copyright holder nor the names of the + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH + DAMAGE. + +Portions of the codec implementation and unit tests are derived from the +Python standard library, which carries the `Python Software Foundation +License `_: + + Copyright (c) 2001-2014 Python Software Foundation; All Rights Reserved + +Portions of the unit tests are derived from the Unicode standard, which +is subject to the Unicode, Inc. License Agreement: + + Copyright (c) 1991-2014 Unicode, Inc. All rights reserved. + Distributed under the Terms of Use in + . + + Permission is hereby granted, free of charge, to any person obtaining + a copy of the Unicode data files and any associated documentation + (the "Data Files") or Unicode software and any associated documentation + (the "Software") to deal in the Data Files or Software + without restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, and/or sell copies of + the Data Files or Software, and to permit persons to whom the Data Files + or Software are furnished to do so, provided that + + (a) this copyright and permission notice appear with all copies + of the Data Files or Software, + + (b) this copyright and permission notice appear in associated + documentation, and + + (c) there is clear notice in each modified Data File or in the Software + as well as in the documentation associated with the Data File(s) or + Software that the data or software has been modified. + + THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF + ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE + WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT OF THIRD PARTY RIGHTS. + IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS + NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL + DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, + DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER + TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THE DATA FILES OR SOFTWARE. + + Except as contained in this notice, the name of a copyright holder + shall not be used in advertising or otherwise to promote the sale, + use or other dealings in these Data Files or Software without prior + written authorization of the copyright holder. 
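A minimal sketch of the round-trip that the hyperframe frame classes vendored above provide, assuming the interpreter can import this site-packages copy; the stream id, flag and payload are arbitrary illustrative values, not anything this repository actually sends:

.. code-block:: python

    from hyperframe.frame import DataFrame, Frame

    # Serialize a DATA frame: a 9-byte header followed by the body.
    frame = DataFrame(stream_id=1, data=b'hello', flags=('END_STREAM',))
    wire = frame.serialize()

    # Parse it back: the header yields the frame object plus its body length,
    # and parse_body() expects a memoryview over exactly that many bytes.
    parsed, length = Frame.parse_frame_header(wire[:9])
    parsed.parse_body(memoryview(wire[9:9 + length]))
    assert isinstance(parsed, DataFrame) and parsed.data == b'hello'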
diff --git a/py3/lib/python3.6/site-packages/idna-2.8.dist-info/METADATA b/py3/lib/python3.6/site-packages/idna-2.8.dist-info/METADATA new file mode 100644 index 0000000..30fde02 --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna-2.8.dist-info/METADATA @@ -0,0 +1,239 @@ +Metadata-Version: 2.1 +Name: idna +Version: 2.8 +Summary: Internationalized Domain Names in Applications (IDNA) +Home-page: https://github.com/kjd/idna +Author: Kim Davies +Author-email: kim@cynosure.com.au +License: BSD-like +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* + +Internationalized Domain Names in Applications (IDNA) +===================================================== + +Support for the Internationalised Domain Names in Applications +(IDNA) protocol as specified in `RFC 5891 `_. +This is the latest version of the protocol and is sometimes referred to as +“IDNA 2008”. + +This library also provides support for Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +This acts as a suitable replacement for the “encodings.idna” module that +comes with the Python standard library, but only supports the +old, deprecated IDNA specification (`RFC 3490 `_). + +Basic functions are simply executed: + +.. code-block:: pycon + + # Python 3 + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + + # Python 2 + >>> import idna + >>> idna.encode(u'ドメイン.テスト') + 'xn--eckwd4c7c.xn--zckzah' + >>> print idna.decode('xn--eckwd4c7c.xn--zckzah') + ドメイン.テスト + +Packages +-------- + +The latest tagged release version is published in the PyPI repository: + +.. image:: https://badge.fury.io/py/idna.svg + :target: http://badge.fury.io/py/idna + + +Installation +------------ + +To install this library, you can use pip: + +.. code-block:: bash + + $ pip install idna + +Alternatively, you can install the package using the bundled setup script: + +.. code-block:: bash + + $ python setup.py install + +This library works with Python 2.7 and Python 3.4 or later. + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a domain +name argument and perform a conversion to A-labels or U-labels respectively. + +.. code-block:: pycon + + # Python 3 + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + +You may use the codec encoding and decoding methods using the +``idna.codec`` module: + +.. 
code-block:: pycon + + # Python 2 + >>> import idna.codec + >>> print u'домена.испытание'.encode('idna') + xn--80ahd1agd.xn--80akhbyknj4f + >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna') + домена.испытание + +Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel`` +functions if necessary: + +.. code-block:: pycon + + # Python 2 + >>> idna.alabel(u'测试') + 'xn--0zwm56d' + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +As described in `RFC 5895 `_, the IDNA +specification no longer normalizes input from different potential ways a user +may input a domain name. This functionality, known as a “mapping”, is now +considered by the specification to be a local user-interface issue distinct +from IDNA conversion functionality. + +This library provides one such mapping, that was developed by the Unicode +Consortium. Known as `Unicode IDNA Compatibility Processing `_, +it provides for both a regular mapping for typical applications, as well as +a transitional mapping to help migrate from older IDNA 2003 applications. + +For example, “Königsgäßchen” is not a permissible label as *LATIN CAPITAL +LETTER K* is not allowed (nor are capital letters in general). UTS 46 will +convert this into lower case prior to applying the IDNA conversion. + +.. code-block:: pycon + + # Python 3 + >>> import idna + >>> idna.encode(u'Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed + >>> idna.encode('Königsgäßchen', uts46=True) + b'xn--knigsgchen-b4a3dun' + >>> print(idna.decode('xn--knigsgchen-b4a3dun')) + königsgäßchen + +Transitional processing provides conversions to help transition from the older +2003 standard to the current standard. For example, in the original IDNA +specification, the *LATIN SMALL LETTER SHARP S* (ß) was converted into two +*LATIN SMALL LETTER S* (ss), whereas in the current IDNA specification this +conversion is not performed. + +.. code-block:: pycon + + # Python 2 + >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True) + 'xn--knigsgsschen-lcb0w' + +Implementors should use transitional processing with caution, only in rare +cases where conversion from legacy labels to current labels must be performed +(i.e. IDNA implementations that pre-date 2008). For typical applications +that just need to convert labels, transitional processing is unlikely to be +beneficial and could produce unexpected incompatible results. + +``encodings.idna`` Compatibility +++++++++++++++++++++++++++++++++ + +Function calls from the Python built-in ``encodings.idna`` module are +mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. +Simply substitute the ``import`` clause in your code to refer to the +new module name. + +Exceptions +---------- + +All errors raised during the conversion following the specification should +raise an exception derived from the ``idna.IDNAError`` base class. + +More specific exceptions that may be generated as ``idna.IDNABidiError`` +when the error reflects an illegal combination of left-to-right and right-to-left +characters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is +an illegal character in an IDN label (i.e. INVALID); and ``idna.InvalidCodepointContext`` +when the codepoint is illegal based on its positional context (i.e. it is CONTEXTO +or CONTEXTJ but the contextual requirements are not satisfied.) 
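A minimal sketch of how this exception hierarchy surfaces in practice, assuming the vendored idna package above; the input and the expected A-label are the same ones used in the UTS 46 examples earlier in this file:

.. code-block:: pycon

    # Python 3
    >>> import idna
    >>> try:
    ...     idna.encode('Königsgäßchen')        # capital K is disallowed under IDNA 2008
    ... except idna.InvalidCodepoint as exc:    # subclass of idna.IDNAError
    ...     print('rejected:', exc)
    ...
    rejected: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed
    >>> idna.encode('Königsgäßchen', uts46=True)  # UTS 46 mapping lower-cases first
    b'xn--knigsgchen-b4a3dun'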
+ +Building and Diagnostics +------------------------ + +The IDNA and UTS 46 functionality relies upon pre-calculated lookup tables for +performance. These tables are derived from computing against eligibility criteria +in the respective standards. These tables are computed using the command-line +script ``tools/idna-data``. + +This tool will fetch relevant tables from the Unicode Consortium and perform the +required calculations to identify eligibility. It has three main modes: + +* ``idna-data make-libdata``. Generates ``idnadata.py`` and ``uts46data.py``, + the pre-calculated lookup tables using for IDNA and UTS 46 conversions. Implementors + who wish to track this library against a different Unicode version may use this tool + to manually generate a different version of the ``idnadata.py`` and ``uts46data.py`` + files. + +* ``idna-data make-table``. Generate a table of the IDNA disposition + (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix B.1 of RFC + 5892 and the pre-computed tables published by `IANA `_. + +* ``idna-data U+0061``. Prints debugging output on the various properties + associated with an individual Unicode codepoint (in this case, U+0061), that are + used to assess the IDNA and UTS 46 status of a codepoint. This is helpful in debugging + or analysis. + +The tool accepts a number of arguments, described using ``idna-data -h``. Most notably, +the ``--version`` argument allows the specification of the version of Unicode to use +in computing the table data. For example, ``idna-data --version 9.0.0 make-libdata`` +will generate library data against Unicode 9.0.0. + +Note that this script requires Python 3, but all generated library data will work +in Python 2.7. + + +Testing +------- + +The library has a test suite based on each rule of the IDNA specification, as +well as tests that are provided as part of the Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +The tests are run automatically on each commit at Travis CI: + +.. 
image:: https://travis-ci.org/kjd/idna.svg?branch=master + :target: https://travis-ci.org/kjd/idna + + diff --git a/py3/lib/python3.6/site-packages/idna-2.8.dist-info/RECORD b/py3/lib/python3.6/site-packages/idna-2.8.dist-info/RECORD new file mode 100644 index 0000000..78f97f3 --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna-2.8.dist-info/RECORD @@ -0,0 +1,22 @@ +idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58 +idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299 +idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232 +idna/core.py,sha256=JDCZZ_PLESqIgEbU8mPyoEufWwoOiIqygA17-QZIe3s,11733 +idna/idnadata.py,sha256=HXaPFw6_YAJ0qppACPu0YLAULtRs3QovRM_CCZHGdY0,40899 +idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749 +idna/package_data.py,sha256=kIzeKKXEouXLR4srqwf9Q3zv-NffKSOz5aSDOJARPB0,21 +idna/uts46data.py,sha256=oLyNZ1pBaiBlj9zFzLFRd_P7J8MkRcgDisjExZR_4MY,198292 +idna-2.8.dist-info/LICENSE.rst,sha256=DUvHq9SNz7FOJCVO5AQGZzf_AWcUTiIpFKIRO4eUaD4,3947 +idna-2.8.dist-info/METADATA,sha256=X4QsM_BLMPhl4gC8SEnXjvl5-gj7hvwAl7UCyR418so,8862 +idna-2.8.dist-info/WHEEL,sha256=CihQvCnsGZQBGAHLEUMf0IdA4fRduS_NBUTMgCTtvPM,110 +idna-2.8.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5 +idna-2.8.dist-info/RECORD,, +idna-2.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna/__pycache__/codec.cpython-36.pyc,, +idna/__pycache__/compat.cpython-36.pyc,, +idna/__pycache__/uts46data.cpython-36.pyc,, +idna/__pycache__/intranges.cpython-36.pyc,, +idna/__pycache__/core.cpython-36.pyc,, +idna/__pycache__/__init__.cpython-36.pyc,, +idna/__pycache__/idnadata.cpython-36.pyc,, +idna/__pycache__/package_data.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/idna-2.8.dist-info/WHEEL b/py3/lib/python3.6/site-packages/idna-2.8.dist-info/WHEEL new file mode 100644 index 0000000..dea0e20 --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna-2.8.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/idna-2.8.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/idna-2.8.dist-info/top_level.txt new file mode 100644 index 0000000..c40472e --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna-2.8.dist-info/top_level.txt @@ -0,0 +1 @@ +idna diff --git a/py3/lib/python3.6/site-packages/idna/__init__.py b/py3/lib/python3.6/site-packages/idna/__init__.py new file mode 100644 index 0000000..847bf93 --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna/__init__.py @@ -0,0 +1,2 @@ +from .package_data import __version__ +from .core import * diff --git a/py3/lib/python3.6/site-packages/idna/codec.py b/py3/lib/python3.6/site-packages/idna/codec.py new file mode 100644 index 0000000..98c65ea --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna/codec.py @@ -0,0 +1,118 @@ +from .core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re + +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return "", 0 + + return encode(data), len(data) + + def decode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return u"", 0 + + return 
decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return ("", 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return (u"", 0) + + # IDNA allows decoding to operate on Unicode strings, too. + if isinstance(data, unicode): + labels = _unicode_dots_re.split(data) + else: + # Must be ASCII string + data = str(data) + unicode(data, "ascii") + labels = data.split(".") + + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = u'.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = u'.' + + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result = u".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + +class StreamReader(Codec, codecs.StreamReader): + pass + +def getregentry(): + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/py3/lib/python3.6/site-packages/idna/compat.py b/py3/lib/python3.6/site-packages/idna/compat.py new file mode 100644 index 0000000..4d47f33 --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna/compat.py @@ -0,0 +1,12 @@ +from .core import * +from .codec import * + +def ToASCII(label): + return encode(label) + +def ToUnicode(label): + return decode(label) + +def nameprep(s): + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") + diff --git a/py3/lib/python3.6/site-packages/idna/core.py b/py3/lib/python3.6/site-packages/idna/core.py new file mode 100644 index 0000000..104624a --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna/core.py @@ -0,0 +1,396 @@ +from . 
import idnadata +import bisect +import unicodedata +import re +import sys +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +if sys.version_info[0] == 3: + unicode = str + unichr = chr + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp): + v = unicodedata.combining(unichr(cp)) + if v == 0: + if not unicodedata.name(unichr(cp)): + raise ValueError("Unknown character in unicodedata") + return v + +def _is_script(cp, script): + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s): + return s.encode('punycode') + +def _unot(s): + return 'U+{0:04X}'.format(s) + + +def valid_label_length(label): + + if len(label) > 63: + return False + return True + + +def valid_string_length(label, trailing_dot): + + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label, check_ltr=False): + + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label): + + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') + return True + + +def check_hyphen_ok(label): + + if label[2:4] == '--': + raise IDNAError('Label has 
disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label): + + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label, pos): + + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('L'), ord('D')]: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('R'), ord('D')]: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label, pos, exception=False): + + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == u'\u30fb': + continue + if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + +def check_label(label): + + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format( + _unot(cp_value), pos+1, repr(label))) + except ValueError: + raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format( + _unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label): + + try: + label = label.encode('ascii') + ulabel(label) + if not valid_label_length(label): + raise IDNAError('Label too long') + return label + except UnicodeEncodeError: + pass + + if not label: + raise IDNAError('No Input') + + label = unicode(label) + check_label(label) + 
label = _punycode(label) + label = _alabel_prefix + label + + if not valid_label_length(label): + raise IDNAError('Label too long') + + return label + + +def ulabel(label): + + if not isinstance(label, (bytes, bytearray)): + try: + label = label.encode('ascii') + except UnicodeEncodeError: + check_label(label) + return label + + label = label.lower() + if label.startswith(_alabel_prefix): + label = label[len(_alabel_prefix):] + else: + check_label(label) + return label.decode('ascii') + + label = label.decode('punycode') + check_label(label) + return label + + +def uts46_remap(domain, std3_rules=True, transitional=False): + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = u"" + try: + for pos, char in enumerate(domain): + code_point = ord(char) + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement = uts46row[2] if len(uts46row) == 3 else None + if (status == "V" or + (status == "D" and not transitional) or + (status == "3" and not std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == "M" or + (status == "3" and not std3_rules) or + (status == "D" and transitional)): + output += replacement + elif status != "I": + raise IndexError() + return unicodedata.normalize("NFC", output) + except IndexError: + raise InvalidCodepoint( + "Codepoint {0} not allowed at position {1} in {2}".format( + _unot(code_point), pos + 1, repr(domain))) + + +def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s, strict=False, uts46=False, std3_rules=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split(u'.') + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(u'') + return u'.'.join(result) diff --git a/py3/lib/python3.6/site-packages/idna/idnadata.py b/py3/lib/python3.6/site-packages/idna/idnadata.py new file mode 100644 index 0000000..a80c959 --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna/idnadata.py @@ -0,0 +1,1979 @@ +# This file is automatically generated by tools/idna-data + +__version__ = "11.0.0" +scripts = { + 'Greek': ( + 0x37000000374, + 0x37500000378, + 0x37a0000037e, + 0x37f00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038b, + 0x38c0000038d, + 0x38e000003a2, + 0x3a3000003e2, + 0x3f000000400, + 0x1d2600001d2b, + 0x1d5d00001d62, + 0x1d6600001d6b, + 0x1dbf00001dc0, + 0x1f0000001f16, + 
0x1f1800001f1e, + 0x1f2000001f46, + 0x1f4800001f4e, + 0x1f5000001f58, + 0x1f5900001f5a, + 0x1f5b00001f5c, + 0x1f5d00001f5e, + 0x1f5f00001f7e, + 0x1f8000001fb5, + 0x1fb600001fc5, + 0x1fc600001fd4, + 0x1fd600001fdc, + 0x1fdd00001ff0, + 0x1ff200001ff5, + 0x1ff600001fff, + 0x212600002127, + 0xab650000ab66, + 0x101400001018f, + 0x101a0000101a1, + 0x1d2000001d246, + ), + 'Han': ( + 0x2e8000002e9a, + 0x2e9b00002ef4, + 0x2f0000002fd6, + 0x300500003006, + 0x300700003008, + 0x30210000302a, + 0x30380000303c, + 0x340000004db6, + 0x4e0000009ff0, + 0xf9000000fa6e, + 0xfa700000fada, + 0x200000002a6d7, + 0x2a7000002b735, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x2f8000002fa1e, + ), + 'Hebrew': ( + 0x591000005c8, + 0x5d0000005eb, + 0x5ef000005f5, + 0xfb1d0000fb37, + 0xfb380000fb3d, + 0xfb3e0000fb3f, + 0xfb400000fb42, + 0xfb430000fb45, + 0xfb460000fb50, + ), + 'Hiragana': ( + 0x304100003097, + 0x309d000030a0, + 0x1b0010001b11f, + 0x1f2000001f201, + ), + 'Katakana': ( + 0x30a1000030fb, + 0x30fd00003100, + 0x31f000003200, + 0x32d0000032ff, + 0x330000003358, + 0xff660000ff70, + 0xff710000ff9e, + 0x1b0000001b001, + ), +} +joining_types = { + 0x600: 85, + 0x601: 85, + 0x602: 85, + 0x603: 85, + 0x604: 85, + 0x605: 85, + 0x608: 85, + 0x60b: 85, + 0x620: 68, + 0x621: 85, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62a: 68, + 0x62b: 68, + 0x62c: 68, + 0x62d: 68, + 0x62e: 68, + 0x62f: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63a: 68, + 0x63b: 68, + 0x63c: 68, + 0x63d: 68, + 0x63e: 68, + 0x63f: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64a: 68, + 0x66e: 68, + 0x66f: 68, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x674: 85, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67a: 68, + 0x67b: 68, + 0x67c: 68, + 0x67d: 68, + 0x67e: 68, + 0x67f: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68a: 82, + 0x68b: 82, + 0x68c: 82, + 0x68d: 82, + 0x68e: 82, + 0x68f: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69a: 68, + 0x69b: 68, + 0x69c: 68, + 0x69d: 68, + 0x69e: 68, + 0x69f: 68, + 0x6a0: 68, + 0x6a1: 68, + 0x6a2: 68, + 0x6a3: 68, + 0x6a4: 68, + 0x6a5: 68, + 0x6a6: 68, + 0x6a7: 68, + 0x6a8: 68, + 0x6a9: 68, + 0x6aa: 68, + 0x6ab: 68, + 0x6ac: 68, + 0x6ad: 68, + 0x6ae: 68, + 0x6af: 68, + 0x6b0: 68, + 0x6b1: 68, + 0x6b2: 68, + 0x6b3: 68, + 0x6b4: 68, + 0x6b5: 68, + 0x6b6: 68, + 0x6b7: 68, + 0x6b8: 68, + 0x6b9: 68, + 0x6ba: 68, + 0x6bb: 68, + 0x6bc: 68, + 0x6bd: 68, + 0x6be: 68, + 0x6bf: 68, + 0x6c0: 82, + 0x6c1: 68, + 0x6c2: 68, + 0x6c3: 82, + 0x6c4: 82, + 0x6c5: 82, + 0x6c6: 82, + 0x6c7: 82, + 0x6c8: 82, + 0x6c9: 82, + 0x6ca: 82, + 0x6cb: 82, + 0x6cc: 68, + 0x6cd: 82, + 0x6ce: 68, + 0x6cf: 82, + 0x6d0: 68, + 0x6d1: 68, + 0x6d2: 82, + 0x6d3: 82, + 0x6d5: 82, + 0x6dd: 85, + 0x6ee: 82, + 0x6ef: 82, + 0x6fa: 68, + 0x6fb: 68, + 0x6fc: 68, + 0x6ff: 68, + 0x70f: 84, + 0x710: 82, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71a: 68, + 0x71b: 68, + 0x71c: 68, + 0x71d: 68, + 0x71e: 82, + 0x71f: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 
0x728: 82, + 0x729: 68, + 0x72a: 82, + 0x72b: 68, + 0x72c: 82, + 0x72d: 68, + 0x72e: 68, + 0x72f: 82, + 0x74d: 82, + 0x74e: 68, + 0x74f: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75a: 82, + 0x75b: 82, + 0x75c: 68, + 0x75d: 68, + 0x75e: 68, + 0x75f: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76a: 68, + 0x76b: 82, + 0x76c: 82, + 0x76d: 68, + 0x76e: 68, + 0x76f: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77a: 68, + 0x77b: 68, + 0x77c: 68, + 0x77d: 68, + 0x77e: 68, + 0x77f: 68, + 0x7ca: 68, + 0x7cb: 68, + 0x7cc: 68, + 0x7cd: 68, + 0x7ce: 68, + 0x7cf: 68, + 0x7d0: 68, + 0x7d1: 68, + 0x7d2: 68, + 0x7d3: 68, + 0x7d4: 68, + 0x7d5: 68, + 0x7d6: 68, + 0x7d7: 68, + 0x7d8: 68, + 0x7d9: 68, + 0x7da: 68, + 0x7db: 68, + 0x7dc: 68, + 0x7dd: 68, + 0x7de: 68, + 0x7df: 68, + 0x7e0: 68, + 0x7e1: 68, + 0x7e2: 68, + 0x7e3: 68, + 0x7e4: 68, + 0x7e5: 68, + 0x7e6: 68, + 0x7e7: 68, + 0x7e8: 68, + 0x7e9: 68, + 0x7ea: 68, + 0x7fa: 67, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84a: 68, + 0x84b: 68, + 0x84c: 68, + 0x84d: 68, + 0x84e: 68, + 0x84f: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 85, + 0x857: 85, + 0x858: 85, + 0x860: 68, + 0x861: 85, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x866: 85, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86a: 82, + 0x8a0: 68, + 0x8a1: 68, + 0x8a2: 68, + 0x8a3: 68, + 0x8a4: 68, + 0x8a5: 68, + 0x8a6: 68, + 0x8a7: 68, + 0x8a8: 68, + 0x8a9: 68, + 0x8aa: 82, + 0x8ab: 82, + 0x8ac: 82, + 0x8ad: 85, + 0x8ae: 82, + 0x8af: 68, + 0x8b0: 68, + 0x8b1: 82, + 0x8b2: 82, + 0x8b3: 68, + 0x8b4: 68, + 0x8b6: 68, + 0x8b7: 68, + 0x8b8: 68, + 0x8b9: 82, + 0x8ba: 68, + 0x8bb: 68, + 0x8bc: 68, + 0x8bd: 68, + 0x8e2: 85, + 0x1806: 85, + 0x1807: 68, + 0x180a: 67, + 0x180e: 85, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182a: 68, + 0x182b: 68, + 0x182c: 68, + 0x182d: 68, + 0x182e: 68, + 0x182f: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183a: 68, + 0x183b: 68, + 0x183c: 68, + 0x183d: 68, + 0x183e: 68, + 0x183f: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184a: 68, + 0x184b: 68, + 0x184c: 68, + 0x184d: 68, + 0x184e: 68, + 0x184f: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185a: 68, + 0x185b: 68, + 0x185c: 68, + 0x185d: 68, + 0x185e: 68, + 0x185f: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186a: 68, + 0x186b: 68, + 0x186c: 68, + 0x186d: 68, + 0x186e: 68, + 0x186f: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1880: 85, + 0x1881: 85, + 0x1882: 85, + 0x1883: 85, + 0x1884: 85, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188a: 68, + 0x188b: 68, + 0x188c: 68, + 
0x188d: 68, + 0x188e: 68, + 0x188f: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189a: 68, + 0x189b: 68, + 0x189c: 68, + 0x189d: 68, + 0x189e: 68, + 0x189f: 68, + 0x18a0: 68, + 0x18a1: 68, + 0x18a2: 68, + 0x18a3: 68, + 0x18a4: 68, + 0x18a5: 68, + 0x18a6: 68, + 0x18a7: 68, + 0x18a8: 68, + 0x18aa: 68, + 0x200c: 85, + 0x200d: 67, + 0x202f: 85, + 0x2066: 85, + 0x2067: 85, + 0x2068: 85, + 0x2069: 85, + 0xa840: 68, + 0xa841: 68, + 0xa842: 68, + 0xa843: 68, + 0xa844: 68, + 0xa845: 68, + 0xa846: 68, + 0xa847: 68, + 0xa848: 68, + 0xa849: 68, + 0xa84a: 68, + 0xa84b: 68, + 0xa84c: 68, + 0xa84d: 68, + 0xa84e: 68, + 0xa84f: 68, + 0xa850: 68, + 0xa851: 68, + 0xa852: 68, + 0xa853: 68, + 0xa854: 68, + 0xa855: 68, + 0xa856: 68, + 0xa857: 68, + 0xa858: 68, + 0xa859: 68, + 0xa85a: 68, + 0xa85b: 68, + 0xa85c: 68, + 0xa85d: 68, + 0xa85e: 68, + 0xa85f: 68, + 0xa860: 68, + 0xa861: 68, + 0xa862: 68, + 0xa863: 68, + 0xa864: 68, + 0xa865: 68, + 0xa866: 68, + 0xa867: 68, + 0xa868: 68, + 0xa869: 68, + 0xa86a: 68, + 0xa86b: 68, + 0xa86c: 68, + 0xa86d: 68, + 0xa86e: 68, + 0xa86f: 68, + 0xa870: 68, + 0xa871: 68, + 0xa872: 76, + 0xa873: 85, + 0x10ac0: 68, + 0x10ac1: 68, + 0x10ac2: 68, + 0x10ac3: 68, + 0x10ac4: 68, + 0x10ac5: 82, + 0x10ac6: 85, + 0x10ac7: 82, + 0x10ac8: 85, + 0x10ac9: 82, + 0x10aca: 82, + 0x10acb: 85, + 0x10acc: 85, + 0x10acd: 76, + 0x10ace: 82, + 0x10acf: 82, + 0x10ad0: 82, + 0x10ad1: 82, + 0x10ad2: 82, + 0x10ad3: 68, + 0x10ad4: 68, + 0x10ad5: 68, + 0x10ad6: 68, + 0x10ad7: 76, + 0x10ad8: 68, + 0x10ad9: 68, + 0x10ada: 68, + 0x10adb: 68, + 0x10adc: 68, + 0x10add: 82, + 0x10ade: 68, + 0x10adf: 68, + 0x10ae0: 68, + 0x10ae1: 82, + 0x10ae2: 85, + 0x10ae3: 85, + 0x10ae4: 82, + 0x10aeb: 68, + 0x10aec: 68, + 0x10aed: 68, + 0x10aee: 68, + 0x10aef: 82, + 0x10b80: 68, + 0x10b81: 82, + 0x10b82: 68, + 0x10b83: 82, + 0x10b84: 82, + 0x10b85: 82, + 0x10b86: 68, + 0x10b87: 68, + 0x10b88: 68, + 0x10b89: 82, + 0x10b8a: 68, + 0x10b8b: 68, + 0x10b8c: 82, + 0x10b8d: 68, + 0x10b8e: 82, + 0x10b8f: 82, + 0x10b90: 68, + 0x10b91: 82, + 0x10ba9: 82, + 0x10baa: 82, + 0x10bab: 82, + 0x10bac: 82, + 0x10bad: 68, + 0x10bae: 68, + 0x10baf: 85, + 0x10d00: 76, + 0x10d01: 68, + 0x10d02: 68, + 0x10d03: 68, + 0x10d04: 68, + 0x10d05: 68, + 0x10d06: 68, + 0x10d07: 68, + 0x10d08: 68, + 0x10d09: 68, + 0x10d0a: 68, + 0x10d0b: 68, + 0x10d0c: 68, + 0x10d0d: 68, + 0x10d0e: 68, + 0x10d0f: 68, + 0x10d10: 68, + 0x10d11: 68, + 0x10d12: 68, + 0x10d13: 68, + 0x10d14: 68, + 0x10d15: 68, + 0x10d16: 68, + 0x10d17: 68, + 0x10d18: 68, + 0x10d19: 68, + 0x10d1a: 68, + 0x10d1b: 68, + 0x10d1c: 68, + 0x10d1d: 68, + 0x10d1e: 68, + 0x10d1f: 68, + 0x10d20: 68, + 0x10d21: 68, + 0x10d22: 82, + 0x10d23: 68, + 0x10f30: 68, + 0x10f31: 68, + 0x10f32: 68, + 0x10f33: 82, + 0x10f34: 68, + 0x10f35: 68, + 0x10f36: 68, + 0x10f37: 68, + 0x10f38: 68, + 0x10f39: 68, + 0x10f3a: 68, + 0x10f3b: 68, + 0x10f3c: 68, + 0x10f3d: 68, + 0x10f3e: 68, + 0x10f3f: 68, + 0x10f40: 68, + 0x10f41: 68, + 0x10f42: 68, + 0x10f43: 68, + 0x10f44: 68, + 0x10f45: 85, + 0x10f51: 68, + 0x10f52: 68, + 0x10f53: 68, + 0x10f54: 82, + 0x110bd: 85, + 0x110cd: 85, + 0x1e900: 68, + 0x1e901: 68, + 0x1e902: 68, + 0x1e903: 68, + 0x1e904: 68, + 0x1e905: 68, + 0x1e906: 68, + 0x1e907: 68, + 0x1e908: 68, + 0x1e909: 68, + 0x1e90a: 68, + 0x1e90b: 68, + 0x1e90c: 68, + 0x1e90d: 68, + 0x1e90e: 68, + 0x1e90f: 68, + 0x1e910: 68, + 0x1e911: 68, + 0x1e912: 68, + 0x1e913: 68, + 0x1e914: 68, + 0x1e915: 68, + 0x1e916: 68, + 0x1e917: 68, 
+ 0x1e918: 68, + 0x1e919: 68, + 0x1e91a: 68, + 0x1e91b: 68, + 0x1e91c: 68, + 0x1e91d: 68, + 0x1e91e: 68, + 0x1e91f: 68, + 0x1e920: 68, + 0x1e921: 68, + 0x1e922: 68, + 0x1e923: 68, + 0x1e924: 68, + 0x1e925: 68, + 0x1e926: 68, + 0x1e927: 68, + 0x1e928: 68, + 0x1e929: 68, + 0x1e92a: 68, + 0x1e92b: 68, + 0x1e92c: 68, + 0x1e92d: 68, + 0x1e92e: 68, + 0x1e92f: 68, + 0x1e930: 68, + 0x1e931: 68, + 0x1e932: 68, + 0x1e933: 68, + 0x1e934: 68, + 0x1e935: 68, + 0x1e936: 68, + 0x1e937: 68, + 0x1e938: 68, + 0x1e939: 68, + 0x1e93a: 68, + 0x1e93b: 68, + 0x1e93c: 68, + 0x1e93d: 68, + 0x1e93e: 68, + 0x1e93f: 68, + 0x1e940: 68, + 0x1e941: 68, + 0x1e942: 68, + 0x1e943: 68, +} +codepoint_classes = { + 'PVALID': ( + 0x2d0000002e, + 0x300000003a, + 0x610000007b, + 0xdf000000f7, + 0xf800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010a, + 0x10b0000010c, + 0x10d0000010e, + 0x10f00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011a, + 0x11b0000011c, + 0x11d0000011e, + 0x11f00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012a, + 0x12b0000012c, + 0x12d0000012e, + 0x12f00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13a0000013b, + 0x13c0000013d, + 0x13e0000013f, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14b0000014c, + 0x14d0000014e, + 0x14f00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015a, + 0x15b0000015c, + 0x15d0000015e, + 0x15f00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016a, + 0x16b0000016c, + 0x16d0000016e, + 0x16f00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17a0000017b, + 0x17c0000017d, + 0x17e0000017f, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18c0000018e, + 0x19200000193, + 0x19500000196, + 0x1990000019c, + 0x19e0000019f, + 0x1a1000001a2, + 0x1a3000001a4, + 0x1a5000001a6, + 0x1a8000001a9, + 0x1aa000001ac, + 0x1ad000001ae, + 0x1b0000001b1, + 0x1b4000001b5, + 0x1b6000001b7, + 0x1b9000001bc, + 0x1bd000001c4, + 0x1ce000001cf, + 0x1d0000001d1, + 0x1d2000001d3, + 0x1d4000001d5, + 0x1d6000001d7, + 0x1d8000001d9, + 0x1da000001db, + 0x1dc000001de, + 0x1df000001e0, + 0x1e1000001e2, + 0x1e3000001e4, + 0x1e5000001e6, + 0x1e7000001e8, + 0x1e9000001ea, + 0x1eb000001ec, + 0x1ed000001ee, + 0x1ef000001f1, + 0x1f5000001f6, + 0x1f9000001fa, + 0x1fb000001fc, + 0x1fd000001fe, + 0x1ff00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020a, + 0x20b0000020c, + 0x20d0000020e, + 0x20f00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021a, + 0x21b0000021c, + 0x21d0000021e, + 0x21f00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022a, + 0x22b0000022c, + 0x22d0000022e, + 0x22f00000230, + 0x23100000232, + 0x2330000023a, + 0x23c0000023d, + 0x23f00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024a, + 0x24b0000024c, + 0x24d0000024e, + 0x24f000002b0, + 0x2b9000002c2, + 0x2c6000002d2, + 0x2ec000002ed, + 0x2ee000002ef, + 0x30000000340, + 0x34200000343, + 0x3460000034f, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37b0000037e, + 0x39000000391, + 0x3ac000003cf, + 0x3d7000003d8, + 0x3d9000003da, + 0x3db000003dc, + 0x3dd000003de, + 0x3df000003e0, + 0x3e1000003e2, + 0x3e3000003e4, + 0x3e5000003e6, + 0x3e7000003e8, + 0x3e9000003ea, + 0x3eb000003ec, + 0x3ed000003ee, + 0x3ef000003f0, + 
0x3f3000003f4, + 0x3f8000003f9, + 0x3fb000003fd, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046a, + 0x46b0000046c, + 0x46d0000046e, + 0x46f00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047a, + 0x47b0000047c, + 0x47d0000047e, + 0x47f00000480, + 0x48100000482, + 0x48300000488, + 0x48b0000048c, + 0x48d0000048e, + 0x48f00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049a, + 0x49b0000049c, + 0x49d0000049e, + 0x49f000004a0, + 0x4a1000004a2, + 0x4a3000004a4, + 0x4a5000004a6, + 0x4a7000004a8, + 0x4a9000004aa, + 0x4ab000004ac, + 0x4ad000004ae, + 0x4af000004b0, + 0x4b1000004b2, + 0x4b3000004b4, + 0x4b5000004b6, + 0x4b7000004b8, + 0x4b9000004ba, + 0x4bb000004bc, + 0x4bd000004be, + 0x4bf000004c0, + 0x4c2000004c3, + 0x4c4000004c5, + 0x4c6000004c7, + 0x4c8000004c9, + 0x4ca000004cb, + 0x4cc000004cd, + 0x4ce000004d0, + 0x4d1000004d2, + 0x4d3000004d4, + 0x4d5000004d6, + 0x4d7000004d8, + 0x4d9000004da, + 0x4db000004dc, + 0x4dd000004de, + 0x4df000004e0, + 0x4e1000004e2, + 0x4e3000004e4, + 0x4e5000004e6, + 0x4e7000004e8, + 0x4e9000004ea, + 0x4eb000004ec, + 0x4ed000004ee, + 0x4ef000004f0, + 0x4f1000004f2, + 0x4f3000004f4, + 0x4f5000004f6, + 0x4f7000004f8, + 0x4f9000004fa, + 0x4fb000004fc, + 0x4fd000004fe, + 0x4ff00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050a, + 0x50b0000050c, + 0x50d0000050e, + 0x50f00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051a, + 0x51b0000051c, + 0x51d0000051e, + 0x51f00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052a, + 0x52b0000052c, + 0x52d0000052e, + 0x52f00000530, + 0x5590000055a, + 0x56000000587, + 0x58800000589, + 0x591000005be, + 0x5bf000005c0, + 0x5c1000005c3, + 0x5c4000005c6, + 0x5c7000005c8, + 0x5d0000005eb, + 0x5ef000005f3, + 0x6100000061b, + 0x62000000640, + 0x64100000660, + 0x66e00000675, + 0x679000006d4, + 0x6d5000006dd, + 0x6df000006e9, + 0x6ea000006f0, + 0x6fa00000700, + 0x7100000074b, + 0x74d000007b2, + 0x7c0000007f6, + 0x7fd000007fe, + 0x8000000082e, + 0x8400000085c, + 0x8600000086b, + 0x8a0000008b5, + 0x8b6000008be, + 0x8d3000008e2, + 0x8e300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098d, + 0x98f00000991, + 0x993000009a9, + 0x9aa000009b1, + 0x9b2000009b3, + 0x9b6000009ba, + 0x9bc000009c5, + 0x9c7000009c9, + 0x9cb000009cf, + 0x9d7000009d8, + 0x9e0000009e4, + 0x9e6000009f2, + 0x9fc000009fd, + 0x9fe000009ff, + 0xa0100000a04, + 0xa0500000a0b, + 0xa0f00000a11, + 0xa1300000a29, + 0xa2a00000a31, + 0xa3200000a33, + 0xa3500000a36, + 0xa3800000a3a, + 0xa3c00000a3d, + 0xa3e00000a43, + 0xa4700000a49, + 0xa4b00000a4e, + 0xa5100000a52, + 0xa5c00000a5d, + 0xa6600000a76, + 0xa8100000a84, + 0xa8500000a8e, + 0xa8f00000a92, + 0xa9300000aa9, + 0xaaa00000ab1, + 0xab200000ab4, + 0xab500000aba, + 0xabc00000ac6, + 0xac700000aca, + 0xacb00000ace, + 0xad000000ad1, + 0xae000000ae4, + 0xae600000af0, + 0xaf900000b00, + 0xb0100000b04, + 0xb0500000b0d, + 0xb0f00000b11, + 0xb1300000b29, + 0xb2a00000b31, + 0xb3200000b34, + 0xb3500000b3a, + 0xb3c00000b45, + 0xb4700000b49, + 0xb4b00000b4e, + 0xb5600000b58, + 0xb5f00000b64, + 0xb6600000b70, + 0xb7100000b72, + 0xb8200000b84, + 0xb8500000b8b, + 0xb8e00000b91, + 0xb9200000b96, + 0xb9900000b9b, + 0xb9c00000b9d, + 0xb9e00000ba0, + 0xba300000ba5, + 0xba800000bab, + 0xbae00000bba, + 0xbbe00000bc3, + 0xbc600000bc9, + 0xbca00000bce, + 0xbd000000bd1, + 0xbd700000bd8, + 
0xbe600000bf0, + 0xc0000000c0d, + 0xc0e00000c11, + 0xc1200000c29, + 0xc2a00000c3a, + 0xc3d00000c45, + 0xc4600000c49, + 0xc4a00000c4e, + 0xc5500000c57, + 0xc5800000c5b, + 0xc6000000c64, + 0xc6600000c70, + 0xc8000000c84, + 0xc8500000c8d, + 0xc8e00000c91, + 0xc9200000ca9, + 0xcaa00000cb4, + 0xcb500000cba, + 0xcbc00000cc5, + 0xcc600000cc9, + 0xcca00000cce, + 0xcd500000cd7, + 0xcde00000cdf, + 0xce000000ce4, + 0xce600000cf0, + 0xcf100000cf3, + 0xd0000000d04, + 0xd0500000d0d, + 0xd0e00000d11, + 0xd1200000d45, + 0xd4600000d49, + 0xd4a00000d4f, + 0xd5400000d58, + 0xd5f00000d64, + 0xd6600000d70, + 0xd7a00000d80, + 0xd8200000d84, + 0xd8500000d97, + 0xd9a00000db2, + 0xdb300000dbc, + 0xdbd00000dbe, + 0xdc000000dc7, + 0xdca00000dcb, + 0xdcf00000dd5, + 0xdd600000dd7, + 0xdd800000de0, + 0xde600000df0, + 0xdf200000df4, + 0xe0100000e33, + 0xe3400000e3b, + 0xe4000000e4f, + 0xe5000000e5a, + 0xe8100000e83, + 0xe8400000e85, + 0xe8700000e89, + 0xe8a00000e8b, + 0xe8d00000e8e, + 0xe9400000e98, + 0xe9900000ea0, + 0xea100000ea4, + 0xea500000ea6, + 0xea700000ea8, + 0xeaa00000eac, + 0xead00000eb3, + 0xeb400000eba, + 0xebb00000ebe, + 0xec000000ec5, + 0xec600000ec7, + 0xec800000ece, + 0xed000000eda, + 0xede00000ee0, + 0xf0000000f01, + 0xf0b00000f0c, + 0xf1800000f1a, + 0xf2000000f2a, + 0xf3500000f36, + 0xf3700000f38, + 0xf3900000f3a, + 0xf3e00000f43, + 0xf4400000f48, + 0xf4900000f4d, + 0xf4e00000f52, + 0xf5300000f57, + 0xf5800000f5c, + 0xf5d00000f69, + 0xf6a00000f6d, + 0xf7100000f73, + 0xf7400000f75, + 0xf7a00000f81, + 0xf8200000f85, + 0xf8600000f93, + 0xf9400000f98, + 0xf9900000f9d, + 0xf9e00000fa2, + 0xfa300000fa7, + 0xfa800000fac, + 0xfad00000fb9, + 0xfba00000fbd, + 0xfc600000fc7, + 0x10000000104a, + 0x10500000109e, + 0x10d0000010fb, + 0x10fd00001100, + 0x120000001249, + 0x124a0000124e, + 0x125000001257, + 0x125800001259, + 0x125a0000125e, + 0x126000001289, + 0x128a0000128e, + 0x1290000012b1, + 0x12b2000012b6, + 0x12b8000012bf, + 0x12c0000012c1, + 0x12c2000012c6, + 0x12c8000012d7, + 0x12d800001311, + 0x131200001316, + 0x13180000135b, + 0x135d00001360, + 0x138000001390, + 0x13a0000013f6, + 0x14010000166d, + 0x166f00001680, + 0x16810000169b, + 0x16a0000016eb, + 0x16f1000016f9, + 0x17000000170d, + 0x170e00001715, + 0x172000001735, + 0x174000001754, + 0x17600000176d, + 0x176e00001771, + 0x177200001774, + 0x1780000017b4, + 0x17b6000017d4, + 0x17d7000017d8, + 0x17dc000017de, + 0x17e0000017ea, + 0x18100000181a, + 0x182000001879, + 0x1880000018ab, + 0x18b0000018f6, + 0x19000000191f, + 0x19200000192c, + 0x19300000193c, + 0x19460000196e, + 0x197000001975, + 0x1980000019ac, + 0x19b0000019ca, + 0x19d0000019da, + 0x1a0000001a1c, + 0x1a2000001a5f, + 0x1a6000001a7d, + 0x1a7f00001a8a, + 0x1a9000001a9a, + 0x1aa700001aa8, + 0x1ab000001abe, + 0x1b0000001b4c, + 0x1b5000001b5a, + 0x1b6b00001b74, + 0x1b8000001bf4, + 0x1c0000001c38, + 0x1c4000001c4a, + 0x1c4d00001c7e, + 0x1cd000001cd3, + 0x1cd400001cfa, + 0x1d0000001d2c, + 0x1d2f00001d30, + 0x1d3b00001d3c, + 0x1d4e00001d4f, + 0x1d6b00001d78, + 0x1d7900001d9b, + 0x1dc000001dfa, + 0x1dfb00001e00, + 0x1e0100001e02, + 0x1e0300001e04, + 0x1e0500001e06, + 0x1e0700001e08, + 0x1e0900001e0a, + 0x1e0b00001e0c, + 0x1e0d00001e0e, + 0x1e0f00001e10, + 0x1e1100001e12, + 0x1e1300001e14, + 0x1e1500001e16, + 0x1e1700001e18, + 0x1e1900001e1a, + 0x1e1b00001e1c, + 0x1e1d00001e1e, + 0x1e1f00001e20, + 0x1e2100001e22, + 0x1e2300001e24, + 0x1e2500001e26, + 0x1e2700001e28, + 0x1e2900001e2a, + 0x1e2b00001e2c, + 0x1e2d00001e2e, + 0x1e2f00001e30, + 0x1e3100001e32, + 0x1e3300001e34, + 0x1e3500001e36, + 0x1e3700001e38, + 
0x1e3900001e3a, + 0x1e3b00001e3c, + 0x1e3d00001e3e, + 0x1e3f00001e40, + 0x1e4100001e42, + 0x1e4300001e44, + 0x1e4500001e46, + 0x1e4700001e48, + 0x1e4900001e4a, + 0x1e4b00001e4c, + 0x1e4d00001e4e, + 0x1e4f00001e50, + 0x1e5100001e52, + 0x1e5300001e54, + 0x1e5500001e56, + 0x1e5700001e58, + 0x1e5900001e5a, + 0x1e5b00001e5c, + 0x1e5d00001e5e, + 0x1e5f00001e60, + 0x1e6100001e62, + 0x1e6300001e64, + 0x1e6500001e66, + 0x1e6700001e68, + 0x1e6900001e6a, + 0x1e6b00001e6c, + 0x1e6d00001e6e, + 0x1e6f00001e70, + 0x1e7100001e72, + 0x1e7300001e74, + 0x1e7500001e76, + 0x1e7700001e78, + 0x1e7900001e7a, + 0x1e7b00001e7c, + 0x1e7d00001e7e, + 0x1e7f00001e80, + 0x1e8100001e82, + 0x1e8300001e84, + 0x1e8500001e86, + 0x1e8700001e88, + 0x1e8900001e8a, + 0x1e8b00001e8c, + 0x1e8d00001e8e, + 0x1e8f00001e90, + 0x1e9100001e92, + 0x1e9300001e94, + 0x1e9500001e9a, + 0x1e9c00001e9e, + 0x1e9f00001ea0, + 0x1ea100001ea2, + 0x1ea300001ea4, + 0x1ea500001ea6, + 0x1ea700001ea8, + 0x1ea900001eaa, + 0x1eab00001eac, + 0x1ead00001eae, + 0x1eaf00001eb0, + 0x1eb100001eb2, + 0x1eb300001eb4, + 0x1eb500001eb6, + 0x1eb700001eb8, + 0x1eb900001eba, + 0x1ebb00001ebc, + 0x1ebd00001ebe, + 0x1ebf00001ec0, + 0x1ec100001ec2, + 0x1ec300001ec4, + 0x1ec500001ec6, + 0x1ec700001ec8, + 0x1ec900001eca, + 0x1ecb00001ecc, + 0x1ecd00001ece, + 0x1ecf00001ed0, + 0x1ed100001ed2, + 0x1ed300001ed4, + 0x1ed500001ed6, + 0x1ed700001ed8, + 0x1ed900001eda, + 0x1edb00001edc, + 0x1edd00001ede, + 0x1edf00001ee0, + 0x1ee100001ee2, + 0x1ee300001ee4, + 0x1ee500001ee6, + 0x1ee700001ee8, + 0x1ee900001eea, + 0x1eeb00001eec, + 0x1eed00001eee, + 0x1eef00001ef0, + 0x1ef100001ef2, + 0x1ef300001ef4, + 0x1ef500001ef6, + 0x1ef700001ef8, + 0x1ef900001efa, + 0x1efb00001efc, + 0x1efd00001efe, + 0x1eff00001f08, + 0x1f1000001f16, + 0x1f2000001f28, + 0x1f3000001f38, + 0x1f4000001f46, + 0x1f5000001f58, + 0x1f6000001f68, + 0x1f7000001f71, + 0x1f7200001f73, + 0x1f7400001f75, + 0x1f7600001f77, + 0x1f7800001f79, + 0x1f7a00001f7b, + 0x1f7c00001f7d, + 0x1fb000001fb2, + 0x1fb600001fb7, + 0x1fc600001fc7, + 0x1fd000001fd3, + 0x1fd600001fd8, + 0x1fe000001fe3, + 0x1fe400001fe8, + 0x1ff600001ff7, + 0x214e0000214f, + 0x218400002185, + 0x2c3000002c5f, + 0x2c6100002c62, + 0x2c6500002c67, + 0x2c6800002c69, + 0x2c6a00002c6b, + 0x2c6c00002c6d, + 0x2c7100002c72, + 0x2c7300002c75, + 0x2c7600002c7c, + 0x2c8100002c82, + 0x2c8300002c84, + 0x2c8500002c86, + 0x2c8700002c88, + 0x2c8900002c8a, + 0x2c8b00002c8c, + 0x2c8d00002c8e, + 0x2c8f00002c90, + 0x2c9100002c92, + 0x2c9300002c94, + 0x2c9500002c96, + 0x2c9700002c98, + 0x2c9900002c9a, + 0x2c9b00002c9c, + 0x2c9d00002c9e, + 0x2c9f00002ca0, + 0x2ca100002ca2, + 0x2ca300002ca4, + 0x2ca500002ca6, + 0x2ca700002ca8, + 0x2ca900002caa, + 0x2cab00002cac, + 0x2cad00002cae, + 0x2caf00002cb0, + 0x2cb100002cb2, + 0x2cb300002cb4, + 0x2cb500002cb6, + 0x2cb700002cb8, + 0x2cb900002cba, + 0x2cbb00002cbc, + 0x2cbd00002cbe, + 0x2cbf00002cc0, + 0x2cc100002cc2, + 0x2cc300002cc4, + 0x2cc500002cc6, + 0x2cc700002cc8, + 0x2cc900002cca, + 0x2ccb00002ccc, + 0x2ccd00002cce, + 0x2ccf00002cd0, + 0x2cd100002cd2, + 0x2cd300002cd4, + 0x2cd500002cd6, + 0x2cd700002cd8, + 0x2cd900002cda, + 0x2cdb00002cdc, + 0x2cdd00002cde, + 0x2cdf00002ce0, + 0x2ce100002ce2, + 0x2ce300002ce5, + 0x2cec00002ced, + 0x2cee00002cf2, + 0x2cf300002cf4, + 0x2d0000002d26, + 0x2d2700002d28, + 0x2d2d00002d2e, + 0x2d3000002d68, + 0x2d7f00002d97, + 0x2da000002da7, + 0x2da800002daf, + 0x2db000002db7, + 0x2db800002dbf, + 0x2dc000002dc7, + 0x2dc800002dcf, + 0x2dd000002dd7, + 0x2dd800002ddf, + 0x2de000002e00, + 0x2e2f00002e30, + 
0x300500003008, + 0x302a0000302e, + 0x303c0000303d, + 0x304100003097, + 0x30990000309b, + 0x309d0000309f, + 0x30a1000030fb, + 0x30fc000030ff, + 0x310500003130, + 0x31a0000031bb, + 0x31f000003200, + 0x340000004db6, + 0x4e0000009ff0, + 0xa0000000a48d, + 0xa4d00000a4fe, + 0xa5000000a60d, + 0xa6100000a62c, + 0xa6410000a642, + 0xa6430000a644, + 0xa6450000a646, + 0xa6470000a648, + 0xa6490000a64a, + 0xa64b0000a64c, + 0xa64d0000a64e, + 0xa64f0000a650, + 0xa6510000a652, + 0xa6530000a654, + 0xa6550000a656, + 0xa6570000a658, + 0xa6590000a65a, + 0xa65b0000a65c, + 0xa65d0000a65e, + 0xa65f0000a660, + 0xa6610000a662, + 0xa6630000a664, + 0xa6650000a666, + 0xa6670000a668, + 0xa6690000a66a, + 0xa66b0000a66c, + 0xa66d0000a670, + 0xa6740000a67e, + 0xa67f0000a680, + 0xa6810000a682, + 0xa6830000a684, + 0xa6850000a686, + 0xa6870000a688, + 0xa6890000a68a, + 0xa68b0000a68c, + 0xa68d0000a68e, + 0xa68f0000a690, + 0xa6910000a692, + 0xa6930000a694, + 0xa6950000a696, + 0xa6970000a698, + 0xa6990000a69a, + 0xa69b0000a69c, + 0xa69e0000a6e6, + 0xa6f00000a6f2, + 0xa7170000a720, + 0xa7230000a724, + 0xa7250000a726, + 0xa7270000a728, + 0xa7290000a72a, + 0xa72b0000a72c, + 0xa72d0000a72e, + 0xa72f0000a732, + 0xa7330000a734, + 0xa7350000a736, + 0xa7370000a738, + 0xa7390000a73a, + 0xa73b0000a73c, + 0xa73d0000a73e, + 0xa73f0000a740, + 0xa7410000a742, + 0xa7430000a744, + 0xa7450000a746, + 0xa7470000a748, + 0xa7490000a74a, + 0xa74b0000a74c, + 0xa74d0000a74e, + 0xa74f0000a750, + 0xa7510000a752, + 0xa7530000a754, + 0xa7550000a756, + 0xa7570000a758, + 0xa7590000a75a, + 0xa75b0000a75c, + 0xa75d0000a75e, + 0xa75f0000a760, + 0xa7610000a762, + 0xa7630000a764, + 0xa7650000a766, + 0xa7670000a768, + 0xa7690000a76a, + 0xa76b0000a76c, + 0xa76d0000a76e, + 0xa76f0000a770, + 0xa7710000a779, + 0xa77a0000a77b, + 0xa77c0000a77d, + 0xa77f0000a780, + 0xa7810000a782, + 0xa7830000a784, + 0xa7850000a786, + 0xa7870000a789, + 0xa78c0000a78d, + 0xa78e0000a790, + 0xa7910000a792, + 0xa7930000a796, + 0xa7970000a798, + 0xa7990000a79a, + 0xa79b0000a79c, + 0xa79d0000a79e, + 0xa79f0000a7a0, + 0xa7a10000a7a2, + 0xa7a30000a7a4, + 0xa7a50000a7a6, + 0xa7a70000a7a8, + 0xa7a90000a7aa, + 0xa7af0000a7b0, + 0xa7b50000a7b6, + 0xa7b70000a7b8, + 0xa7b90000a7ba, + 0xa7f70000a7f8, + 0xa7fa0000a828, + 0xa8400000a874, + 0xa8800000a8c6, + 0xa8d00000a8da, + 0xa8e00000a8f8, + 0xa8fb0000a8fc, + 0xa8fd0000a92e, + 0xa9300000a954, + 0xa9800000a9c1, + 0xa9cf0000a9da, + 0xa9e00000a9ff, + 0xaa000000aa37, + 0xaa400000aa4e, + 0xaa500000aa5a, + 0xaa600000aa77, + 0xaa7a0000aac3, + 0xaadb0000aade, + 0xaae00000aaf0, + 0xaaf20000aaf7, + 0xab010000ab07, + 0xab090000ab0f, + 0xab110000ab17, + 0xab200000ab27, + 0xab280000ab2f, + 0xab300000ab5b, + 0xab600000ab66, + 0xabc00000abeb, + 0xabec0000abee, + 0xabf00000abfa, + 0xac000000d7a4, + 0xfa0e0000fa10, + 0xfa110000fa12, + 0xfa130000fa15, + 0xfa1f0000fa20, + 0xfa210000fa22, + 0xfa230000fa25, + 0xfa270000fa2a, + 0xfb1e0000fb1f, + 0xfe200000fe30, + 0xfe730000fe74, + 0x100000001000c, + 0x1000d00010027, + 0x100280001003b, + 0x1003c0001003e, + 0x1003f0001004e, + 0x100500001005e, + 0x10080000100fb, + 0x101fd000101fe, + 0x102800001029d, + 0x102a0000102d1, + 0x102e0000102e1, + 0x1030000010320, + 0x1032d00010341, + 0x103420001034a, + 0x103500001037b, + 0x103800001039e, + 0x103a0000103c4, + 0x103c8000103d0, + 0x104280001049e, + 0x104a0000104aa, + 0x104d8000104fc, + 0x1050000010528, + 0x1053000010564, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1080000010806, + 0x1080800010809, + 0x1080a00010836, + 0x1083700010839, + 0x1083c0001083d, + 
0x1083f00010856, + 0x1086000010877, + 0x108800001089f, + 0x108e0000108f3, + 0x108f4000108f6, + 0x1090000010916, + 0x109200001093a, + 0x10980000109b8, + 0x109be000109c0, + 0x10a0000010a04, + 0x10a0500010a07, + 0x10a0c00010a14, + 0x10a1500010a18, + 0x10a1900010a36, + 0x10a3800010a3b, + 0x10a3f00010a40, + 0x10a6000010a7d, + 0x10a8000010a9d, + 0x10ac000010ac8, + 0x10ac900010ae7, + 0x10b0000010b36, + 0x10b4000010b56, + 0x10b6000010b73, + 0x10b8000010b92, + 0x10c0000010c49, + 0x10cc000010cf3, + 0x10d0000010d28, + 0x10d3000010d3a, + 0x10f0000010f1d, + 0x10f2700010f28, + 0x10f3000010f51, + 0x1100000011047, + 0x1106600011070, + 0x1107f000110bb, + 0x110d0000110e9, + 0x110f0000110fa, + 0x1110000011135, + 0x1113600011140, + 0x1114400011147, + 0x1115000011174, + 0x1117600011177, + 0x11180000111c5, + 0x111c9000111cd, + 0x111d0000111db, + 0x111dc000111dd, + 0x1120000011212, + 0x1121300011238, + 0x1123e0001123f, + 0x1128000011287, + 0x1128800011289, + 0x1128a0001128e, + 0x1128f0001129e, + 0x1129f000112a9, + 0x112b0000112eb, + 0x112f0000112fa, + 0x1130000011304, + 0x113050001130d, + 0x1130f00011311, + 0x1131300011329, + 0x1132a00011331, + 0x1133200011334, + 0x113350001133a, + 0x1133b00011345, + 0x1134700011349, + 0x1134b0001134e, + 0x1135000011351, + 0x1135700011358, + 0x1135d00011364, + 0x113660001136d, + 0x1137000011375, + 0x114000001144b, + 0x114500001145a, + 0x1145e0001145f, + 0x11480000114c6, + 0x114c7000114c8, + 0x114d0000114da, + 0x11580000115b6, + 0x115b8000115c1, + 0x115d8000115de, + 0x1160000011641, + 0x1164400011645, + 0x116500001165a, + 0x11680000116b8, + 0x116c0000116ca, + 0x117000001171b, + 0x1171d0001172c, + 0x117300001173a, + 0x118000001183b, + 0x118c0000118ea, + 0x118ff00011900, + 0x11a0000011a3f, + 0x11a4700011a48, + 0x11a5000011a84, + 0x11a8600011a9a, + 0x11a9d00011a9e, + 0x11ac000011af9, + 0x11c0000011c09, + 0x11c0a00011c37, + 0x11c3800011c41, + 0x11c5000011c5a, + 0x11c7200011c90, + 0x11c9200011ca8, + 0x11ca900011cb7, + 0x11d0000011d07, + 0x11d0800011d0a, + 0x11d0b00011d37, + 0x11d3a00011d3b, + 0x11d3c00011d3e, + 0x11d3f00011d48, + 0x11d5000011d5a, + 0x11d6000011d66, + 0x11d6700011d69, + 0x11d6a00011d8f, + 0x11d9000011d92, + 0x11d9300011d99, + 0x11da000011daa, + 0x11ee000011ef7, + 0x120000001239a, + 0x1248000012544, + 0x130000001342f, + 0x1440000014647, + 0x1680000016a39, + 0x16a4000016a5f, + 0x16a6000016a6a, + 0x16ad000016aee, + 0x16af000016af5, + 0x16b0000016b37, + 0x16b4000016b44, + 0x16b5000016b5a, + 0x16b6300016b78, + 0x16b7d00016b90, + 0x16e6000016e80, + 0x16f0000016f45, + 0x16f5000016f7f, + 0x16f8f00016fa0, + 0x16fe000016fe2, + 0x17000000187f2, + 0x1880000018af3, + 0x1b0000001b11f, + 0x1b1700001b2fc, + 0x1bc000001bc6b, + 0x1bc700001bc7d, + 0x1bc800001bc89, + 0x1bc900001bc9a, + 0x1bc9d0001bc9f, + 0x1da000001da37, + 0x1da3b0001da6d, + 0x1da750001da76, + 0x1da840001da85, + 0x1da9b0001daa0, + 0x1daa10001dab0, + 0x1e0000001e007, + 0x1e0080001e019, + 0x1e01b0001e022, + 0x1e0230001e025, + 0x1e0260001e02b, + 0x1e8000001e8c5, + 0x1e8d00001e8d7, + 0x1e9220001e94b, + 0x1e9500001e95a, + 0x200000002a6d7, + 0x2a7000002b735, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + ), + 'CONTEXTJ': ( + 0x200c0000200e, + ), + 'CONTEXTO': ( + 0xb7000000b8, + 0x37500000376, + 0x5f3000005f5, + 0x6600000066a, + 0x6f0000006fa, + 0x30fb000030fc, + ), +} diff --git a/py3/lib/python3.6/site-packages/idna/intranges.py b/py3/lib/python3.6/site-packages/idna/intranges.py new file mode 100644 index 0000000..fa8a735 --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna/intranges.py @@ -0,0 +1,53 @@ +""" 
+Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect + +def intranges_from_list(list_): + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. + """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i+1 < len(sorted_list): + if sorted_list[i] == sorted_list[i+1]-1: + continue + current_range = sorted_list[last_write+1:i+1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + +def _encode_range(start, end): + return (start << 32) | end + +def _decode_range(r): + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_, ranges): + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos-1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/py3/lib/python3.6/site-packages/idna/package_data.py b/py3/lib/python3.6/site-packages/idna/package_data.py new file mode 100644 index 0000000..257e898 --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna/package_data.py @@ -0,0 +1,2 @@ +__version__ = '2.8' + diff --git a/py3/lib/python3.6/site-packages/idna/uts46data.py b/py3/lib/python3.6/site-packages/idna/uts46data.py new file mode 100644 index 0000000..a68ed4c --- /dev/null +++ b/py3/lib/python3.6/site-packages/idna/uts46data.py @@ -0,0 +1,8205 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = "11.0.0" +def _seg_0(): + return [ + (0x0, '3'), + (0x1, '3'), + (0x2, '3'), + (0x3, '3'), + (0x4, '3'), + (0x5, '3'), + (0x6, '3'), + (0x7, '3'), + (0x8, '3'), + (0x9, '3'), + (0xA, '3'), + (0xB, '3'), + (0xC, '3'), + (0xD, '3'), + (0xE, '3'), + (0xF, '3'), + (0x10, '3'), + (0x11, '3'), + (0x12, '3'), + (0x13, '3'), + (0x14, '3'), + (0x15, '3'), + (0x16, '3'), + (0x17, '3'), + (0x18, '3'), + (0x19, '3'), + (0x1A, '3'), + (0x1B, '3'), + (0x1C, '3'), + (0x1D, '3'), + (0x1E, '3'), + (0x1F, '3'), + (0x20, '3'), + (0x21, '3'), + (0x22, '3'), + (0x23, '3'), + (0x24, '3'), + (0x25, '3'), + (0x26, '3'), + (0x27, '3'), + (0x28, '3'), + (0x29, '3'), + (0x2A, '3'), + (0x2B, '3'), + (0x2C, '3'), + (0x2D, 'V'), + (0x2E, 'V'), + (0x2F, '3'), + (0x30, 'V'), + (0x31, 'V'), + (0x32, 'V'), + (0x33, 'V'), + (0x34, 'V'), + (0x35, 'V'), + (0x36, 'V'), + (0x37, 'V'), + (0x38, 'V'), + (0x39, 'V'), + (0x3A, '3'), + (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), + (0x3F, '3'), + (0x40, '3'), + (0x41, 'M', u'a'), + (0x42, 'M', u'b'), + (0x43, 'M', u'c'), + (0x44, 'M', u'd'), + (0x45, 'M', u'e'), + (0x46, 'M', u'f'), + (0x47, 'M', u'g'), + (0x48, 'M', u'h'), + (0x49, 'M', u'i'), + (0x4A, 'M', u'j'), + (0x4B, 'M', u'k'), + (0x4C, 'M', u'l'), + 
(0x4D, 'M', u'm'), + (0x4E, 'M', u'n'), + (0x4F, 'M', u'o'), + (0x50, 'M', u'p'), + (0x51, 'M', u'q'), + (0x52, 'M', u'r'), + (0x53, 'M', u's'), + (0x54, 'M', u't'), + (0x55, 'M', u'u'), + (0x56, 'M', u'v'), + (0x57, 'M', u'w'), + (0x58, 'M', u'x'), + (0x59, 'M', u'y'), + (0x5A, 'M', u'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + ] + +def _seg_1(): + return [ + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', u' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', u' ̈'), + (0xA9, 'V'), + (0xAA, 'M', u'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', u' ̄'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', u'2'), + (0xB3, 'M', u'3'), + (0xB4, '3', u' ́'), + (0xB5, 'M', u'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', u' ̧'), + (0xB9, 'M', u'1'), + (0xBA, 'M', u'o'), + (0xBB, 'V'), + (0xBC, 'M', u'1⁄4'), + (0xBD, 'M', u'1⁄2'), + (0xBE, 'M', u'3⁄4'), + (0xBF, 'V'), + (0xC0, 'M', u'à'), + (0xC1, 'M', u'á'), + (0xC2, 'M', u'â'), + (0xC3, 'M', u'ã'), + (0xC4, 'M', u'ä'), + (0xC5, 'M', u'å'), + (0xC6, 'M', u'æ'), + (0xC7, 'M', u'ç'), + ] + +def _seg_2(): + return [ + (0xC8, 'M', u'è'), + (0xC9, 'M', u'é'), + (0xCA, 'M', u'ê'), + (0xCB, 'M', u'ë'), + (0xCC, 'M', u'ì'), + (0xCD, 'M', u'í'), + (0xCE, 'M', u'î'), + (0xCF, 'M', u'ï'), + (0xD0, 'M', u'ð'), + (0xD1, 'M', u'ñ'), + (0xD2, 'M', u'ò'), + (0xD3, 'M', u'ó'), + (0xD4, 'M', u'ô'), + (0xD5, 'M', u'õ'), + (0xD6, 'M', u'ö'), + (0xD7, 'V'), + (0xD8, 'M', u'ø'), + (0xD9, 'M', u'ù'), + (0xDA, 'M', u'ú'), + (0xDB, 'M', u'û'), + (0xDC, 'M', u'ü'), + (0xDD, 'M', u'ý'), + (0xDE, 'M', u'þ'), + (0xDF, 'D', u'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', u'ā'), + (0x101, 'V'), + (0x102, 'M', u'ă'), + (0x103, 'V'), + (0x104, 'M', u'ą'), + (0x105, 'V'), + (0x106, 'M', u'ć'), + (0x107, 'V'), + (0x108, 'M', u'ĉ'), + (0x109, 'V'), + (0x10A, 'M', u'ċ'), + (0x10B, 'V'), + (0x10C, 'M', u'č'), + (0x10D, 'V'), + (0x10E, 'M', u'ď'), + (0x10F, 'V'), + (0x110, 'M', u'đ'), + (0x111, 'V'), + (0x112, 'M', u'ē'), + (0x113, 'V'), + (0x114, 'M', u'ĕ'), + (0x115, 'V'), + (0x116, 'M', u'ė'), + 
(0x117, 'V'), + (0x118, 'M', u'ę'), + (0x119, 'V'), + (0x11A, 'M', u'ě'), + (0x11B, 'V'), + (0x11C, 'M', u'ĝ'), + (0x11D, 'V'), + (0x11E, 'M', u'ğ'), + (0x11F, 'V'), + (0x120, 'M', u'ġ'), + (0x121, 'V'), + (0x122, 'M', u'ģ'), + (0x123, 'V'), + (0x124, 'M', u'ĥ'), + (0x125, 'V'), + (0x126, 'M', u'ħ'), + (0x127, 'V'), + (0x128, 'M', u'ĩ'), + (0x129, 'V'), + (0x12A, 'M', u'ī'), + (0x12B, 'V'), + ] + +def _seg_3(): + return [ + (0x12C, 'M', u'ĭ'), + (0x12D, 'V'), + (0x12E, 'M', u'į'), + (0x12F, 'V'), + (0x130, 'M', u'i̇'), + (0x131, 'V'), + (0x132, 'M', u'ij'), + (0x134, 'M', u'ĵ'), + (0x135, 'V'), + (0x136, 'M', u'ķ'), + (0x137, 'V'), + (0x139, 'M', u'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', u'ļ'), + (0x13C, 'V'), + (0x13D, 'M', u'ľ'), + (0x13E, 'V'), + (0x13F, 'M', u'l·'), + (0x141, 'M', u'ł'), + (0x142, 'V'), + (0x143, 'M', u'ń'), + (0x144, 'V'), + (0x145, 'M', u'ņ'), + (0x146, 'V'), + (0x147, 'M', u'ň'), + (0x148, 'V'), + (0x149, 'M', u'ʼn'), + (0x14A, 'M', u'ŋ'), + (0x14B, 'V'), + (0x14C, 'M', u'ō'), + (0x14D, 'V'), + (0x14E, 'M', u'ŏ'), + (0x14F, 'V'), + (0x150, 'M', u'ő'), + (0x151, 'V'), + (0x152, 'M', u'œ'), + (0x153, 'V'), + (0x154, 'M', u'ŕ'), + (0x155, 'V'), + (0x156, 'M', u'ŗ'), + (0x157, 'V'), + (0x158, 'M', u'ř'), + (0x159, 'V'), + (0x15A, 'M', u'ś'), + (0x15B, 'V'), + (0x15C, 'M', u'ŝ'), + (0x15D, 'V'), + (0x15E, 'M', u'ş'), + (0x15F, 'V'), + (0x160, 'M', u'š'), + (0x161, 'V'), + (0x162, 'M', u'ţ'), + (0x163, 'V'), + (0x164, 'M', u'ť'), + (0x165, 'V'), + (0x166, 'M', u'ŧ'), + (0x167, 'V'), + (0x168, 'M', u'ũ'), + (0x169, 'V'), + (0x16A, 'M', u'ū'), + (0x16B, 'V'), + (0x16C, 'M', u'ŭ'), + (0x16D, 'V'), + (0x16E, 'M', u'ů'), + (0x16F, 'V'), + (0x170, 'M', u'ű'), + (0x171, 'V'), + (0x172, 'M', u'ų'), + (0x173, 'V'), + (0x174, 'M', u'ŵ'), + (0x175, 'V'), + (0x176, 'M', u'ŷ'), + (0x177, 'V'), + (0x178, 'M', u'ÿ'), + (0x179, 'M', u'ź'), + (0x17A, 'V'), + (0x17B, 'M', u'ż'), + (0x17C, 'V'), + (0x17D, 'M', u'ž'), + (0x17E, 'V'), + (0x17F, 'M', u's'), + (0x180, 'V'), + (0x181, 'M', u'ɓ'), + (0x182, 'M', u'ƃ'), + (0x183, 'V'), + (0x184, 'M', u'ƅ'), + (0x185, 'V'), + (0x186, 'M', u'ɔ'), + (0x187, 'M', u'ƈ'), + (0x188, 'V'), + (0x189, 'M', u'ɖ'), + (0x18A, 'M', u'ɗ'), + (0x18B, 'M', u'ƌ'), + (0x18C, 'V'), + (0x18E, 'M', u'ǝ'), + (0x18F, 'M', u'ə'), + (0x190, 'M', u'ɛ'), + (0x191, 'M', u'ƒ'), + (0x192, 'V'), + (0x193, 'M', u'ɠ'), + ] + +def _seg_4(): + return [ + (0x194, 'M', u'ɣ'), + (0x195, 'V'), + (0x196, 'M', u'ɩ'), + (0x197, 'M', u'ɨ'), + (0x198, 'M', u'ƙ'), + (0x199, 'V'), + (0x19C, 'M', u'ɯ'), + (0x19D, 'M', u'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', u'ɵ'), + (0x1A0, 'M', u'ơ'), + (0x1A1, 'V'), + (0x1A2, 'M', u'ƣ'), + (0x1A3, 'V'), + (0x1A4, 'M', u'ƥ'), + (0x1A5, 'V'), + (0x1A6, 'M', u'ʀ'), + (0x1A7, 'M', u'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', u'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', u'ƭ'), + (0x1AD, 'V'), + (0x1AE, 'M', u'ʈ'), + (0x1AF, 'M', u'ư'), + (0x1B0, 'V'), + (0x1B1, 'M', u'ʊ'), + (0x1B2, 'M', u'ʋ'), + (0x1B3, 'M', u'ƴ'), + (0x1B4, 'V'), + (0x1B5, 'M', u'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', u'ʒ'), + (0x1B8, 'M', u'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', u'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', u'dž'), + (0x1C7, 'M', u'lj'), + (0x1CA, 'M', u'nj'), + (0x1CD, 'M', u'ǎ'), + (0x1CE, 'V'), + (0x1CF, 'M', u'ǐ'), + (0x1D0, 'V'), + (0x1D1, 'M', u'ǒ'), + (0x1D2, 'V'), + (0x1D3, 'M', u'ǔ'), + (0x1D4, 'V'), + (0x1D5, 'M', u'ǖ'), + (0x1D6, 'V'), + (0x1D7, 'M', u'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', u'ǚ'), + (0x1DA, 'V'), + (0x1DB, 'M', u'ǜ'), + (0x1DC, 'V'), + (0x1DE, 'M', u'ǟ'), + (0x1DF, 'V'), + (0x1E0, 'M', 
u'ǡ'), + (0x1E1, 'V'), + (0x1E2, 'M', u'ǣ'), + (0x1E3, 'V'), + (0x1E4, 'M', u'ǥ'), + (0x1E5, 'V'), + (0x1E6, 'M', u'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', u'ǩ'), + (0x1E9, 'V'), + (0x1EA, 'M', u'ǫ'), + (0x1EB, 'V'), + (0x1EC, 'M', u'ǭ'), + (0x1ED, 'V'), + (0x1EE, 'M', u'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', u'dz'), + (0x1F4, 'M', u'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', u'ƕ'), + (0x1F7, 'M', u'ƿ'), + (0x1F8, 'M', u'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', u'ǻ'), + (0x1FB, 'V'), + (0x1FC, 'M', u'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', u'ǿ'), + (0x1FF, 'V'), + (0x200, 'M', u'ȁ'), + (0x201, 'V'), + (0x202, 'M', u'ȃ'), + (0x203, 'V'), + (0x204, 'M', u'ȅ'), + (0x205, 'V'), + (0x206, 'M', u'ȇ'), + (0x207, 'V'), + (0x208, 'M', u'ȉ'), + (0x209, 'V'), + (0x20A, 'M', u'ȋ'), + (0x20B, 'V'), + (0x20C, 'M', u'ȍ'), + ] + +def _seg_5(): + return [ + (0x20D, 'V'), + (0x20E, 'M', u'ȏ'), + (0x20F, 'V'), + (0x210, 'M', u'ȑ'), + (0x211, 'V'), + (0x212, 'M', u'ȓ'), + (0x213, 'V'), + (0x214, 'M', u'ȕ'), + (0x215, 'V'), + (0x216, 'M', u'ȗ'), + (0x217, 'V'), + (0x218, 'M', u'ș'), + (0x219, 'V'), + (0x21A, 'M', u'ț'), + (0x21B, 'V'), + (0x21C, 'M', u'ȝ'), + (0x21D, 'V'), + (0x21E, 'M', u'ȟ'), + (0x21F, 'V'), + (0x220, 'M', u'ƞ'), + (0x221, 'V'), + (0x222, 'M', u'ȣ'), + (0x223, 'V'), + (0x224, 'M', u'ȥ'), + (0x225, 'V'), + (0x226, 'M', u'ȧ'), + (0x227, 'V'), + (0x228, 'M', u'ȩ'), + (0x229, 'V'), + (0x22A, 'M', u'ȫ'), + (0x22B, 'V'), + (0x22C, 'M', u'ȭ'), + (0x22D, 'V'), + (0x22E, 'M', u'ȯ'), + (0x22F, 'V'), + (0x230, 'M', u'ȱ'), + (0x231, 'V'), + (0x232, 'M', u'ȳ'), + (0x233, 'V'), + (0x23A, 'M', u'ⱥ'), + (0x23B, 'M', u'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', u'ƚ'), + (0x23E, 'M', u'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', u'ɂ'), + (0x242, 'V'), + (0x243, 'M', u'ƀ'), + (0x244, 'M', u'ʉ'), + (0x245, 'M', u'ʌ'), + (0x246, 'M', u'ɇ'), + (0x247, 'V'), + (0x248, 'M', u'ɉ'), + (0x249, 'V'), + (0x24A, 'M', u'ɋ'), + (0x24B, 'V'), + (0x24C, 'M', u'ɍ'), + (0x24D, 'V'), + (0x24E, 'M', u'ɏ'), + (0x24F, 'V'), + (0x2B0, 'M', u'h'), + (0x2B1, 'M', u'ɦ'), + (0x2B2, 'M', u'j'), + (0x2B3, 'M', u'r'), + (0x2B4, 'M', u'ɹ'), + (0x2B5, 'M', u'ɻ'), + (0x2B6, 'M', u'ʁ'), + (0x2B7, 'M', u'w'), + (0x2B8, 'M', u'y'), + (0x2B9, 'V'), + (0x2D8, '3', u' ̆'), + (0x2D9, '3', u' ̇'), + (0x2DA, '3', u' ̊'), + (0x2DB, '3', u' ̨'), + (0x2DC, '3', u' ̃'), + (0x2DD, '3', u' ̋'), + (0x2DE, 'V'), + (0x2E0, 'M', u'ɣ'), + (0x2E1, 'M', u'l'), + (0x2E2, 'M', u's'), + (0x2E3, 'M', u'x'), + (0x2E4, 'M', u'ʕ'), + (0x2E5, 'V'), + (0x340, 'M', u'̀'), + (0x341, 'M', u'́'), + (0x342, 'V'), + (0x343, 'M', u'̓'), + (0x344, 'M', u'̈́'), + (0x345, 'M', u'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', u'ͱ'), + (0x371, 'V'), + (0x372, 'M', u'ͳ'), + (0x373, 'V'), + (0x374, 'M', u'ʹ'), + (0x375, 'V'), + (0x376, 'M', u'ͷ'), + (0x377, 'V'), + ] + +def _seg_6(): + return [ + (0x378, 'X'), + (0x37A, '3', u' ι'), + (0x37B, 'V'), + (0x37E, '3', u';'), + (0x37F, 'M', u'ϳ'), + (0x380, 'X'), + (0x384, '3', u' ́'), + (0x385, '3', u' ̈́'), + (0x386, 'M', u'ά'), + (0x387, 'M', u'·'), + (0x388, 'M', u'έ'), + (0x389, 'M', u'ή'), + (0x38A, 'M', u'ί'), + (0x38B, 'X'), + (0x38C, 'M', u'ό'), + (0x38D, 'X'), + (0x38E, 'M', u'ύ'), + (0x38F, 'M', u'ώ'), + (0x390, 'V'), + (0x391, 'M', u'α'), + (0x392, 'M', u'β'), + (0x393, 'M', u'γ'), + (0x394, 'M', u'δ'), + (0x395, 'M', u'ε'), + (0x396, 'M', u'ζ'), + (0x397, 'M', u'η'), + (0x398, 'M', u'θ'), + (0x399, 'M', u'ι'), + (0x39A, 'M', u'κ'), + (0x39B, 'M', u'λ'), + (0x39C, 'M', u'μ'), + (0x39D, 'M', u'ν'), + (0x39E, 'M', u'ξ'), + (0x39F, 'M', u'ο'), + 
(0x3A0, 'M', u'π'), + (0x3A1, 'M', u'ρ'), + (0x3A2, 'X'), + (0x3A3, 'M', u'σ'), + (0x3A4, 'M', u'τ'), + (0x3A5, 'M', u'υ'), + (0x3A6, 'M', u'φ'), + (0x3A7, 'M', u'χ'), + (0x3A8, 'M', u'ψ'), + (0x3A9, 'M', u'ω'), + (0x3AA, 'M', u'ϊ'), + (0x3AB, 'M', u'ϋ'), + (0x3AC, 'V'), + (0x3C2, 'D', u'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', u'ϗ'), + (0x3D0, 'M', u'β'), + (0x3D1, 'M', u'θ'), + (0x3D2, 'M', u'υ'), + (0x3D3, 'M', u'ύ'), + (0x3D4, 'M', u'ϋ'), + (0x3D5, 'M', u'φ'), + (0x3D6, 'M', u'π'), + (0x3D7, 'V'), + (0x3D8, 'M', u'ϙ'), + (0x3D9, 'V'), + (0x3DA, 'M', u'ϛ'), + (0x3DB, 'V'), + (0x3DC, 'M', u'ϝ'), + (0x3DD, 'V'), + (0x3DE, 'M', u'ϟ'), + (0x3DF, 'V'), + (0x3E0, 'M', u'ϡ'), + (0x3E1, 'V'), + (0x3E2, 'M', u'ϣ'), + (0x3E3, 'V'), + (0x3E4, 'M', u'ϥ'), + (0x3E5, 'V'), + (0x3E6, 'M', u'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', u'ϩ'), + (0x3E9, 'V'), + (0x3EA, 'M', u'ϫ'), + (0x3EB, 'V'), + (0x3EC, 'M', u'ϭ'), + (0x3ED, 'V'), + (0x3EE, 'M', u'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', u'κ'), + (0x3F1, 'M', u'ρ'), + (0x3F2, 'M', u'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', u'θ'), + (0x3F5, 'M', u'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', u'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', u'σ'), + (0x3FA, 'M', u'ϻ'), + (0x3FB, 'V'), + (0x3FD, 'M', u'ͻ'), + (0x3FE, 'M', u'ͼ'), + (0x3FF, 'M', u'ͽ'), + (0x400, 'M', u'ѐ'), + (0x401, 'M', u'ё'), + (0x402, 'M', u'ђ'), + ] + +def _seg_7(): + return [ + (0x403, 'M', u'ѓ'), + (0x404, 'M', u'є'), + (0x405, 'M', u'ѕ'), + (0x406, 'M', u'і'), + (0x407, 'M', u'ї'), + (0x408, 'M', u'ј'), + (0x409, 'M', u'љ'), + (0x40A, 'M', u'њ'), + (0x40B, 'M', u'ћ'), + (0x40C, 'M', u'ќ'), + (0x40D, 'M', u'ѝ'), + (0x40E, 'M', u'ў'), + (0x40F, 'M', u'џ'), + (0x410, 'M', u'а'), + (0x411, 'M', u'б'), + (0x412, 'M', u'в'), + (0x413, 'M', u'г'), + (0x414, 'M', u'д'), + (0x415, 'M', u'е'), + (0x416, 'M', u'ж'), + (0x417, 'M', u'з'), + (0x418, 'M', u'и'), + (0x419, 'M', u'й'), + (0x41A, 'M', u'к'), + (0x41B, 'M', u'л'), + (0x41C, 'M', u'м'), + (0x41D, 'M', u'н'), + (0x41E, 'M', u'о'), + (0x41F, 'M', u'п'), + (0x420, 'M', u'р'), + (0x421, 'M', u'с'), + (0x422, 'M', u'т'), + (0x423, 'M', u'у'), + (0x424, 'M', u'ф'), + (0x425, 'M', u'х'), + (0x426, 'M', u'ц'), + (0x427, 'M', u'ч'), + (0x428, 'M', u'ш'), + (0x429, 'M', u'щ'), + (0x42A, 'M', u'ъ'), + (0x42B, 'M', u'ы'), + (0x42C, 'M', u'ь'), + (0x42D, 'M', u'э'), + (0x42E, 'M', u'ю'), + (0x42F, 'M', u'я'), + (0x430, 'V'), + (0x460, 'M', u'ѡ'), + (0x461, 'V'), + (0x462, 'M', u'ѣ'), + (0x463, 'V'), + (0x464, 'M', u'ѥ'), + (0x465, 'V'), + (0x466, 'M', u'ѧ'), + (0x467, 'V'), + (0x468, 'M', u'ѩ'), + (0x469, 'V'), + (0x46A, 'M', u'ѫ'), + (0x46B, 'V'), + (0x46C, 'M', u'ѭ'), + (0x46D, 'V'), + (0x46E, 'M', u'ѯ'), + (0x46F, 'V'), + (0x470, 'M', u'ѱ'), + (0x471, 'V'), + (0x472, 'M', u'ѳ'), + (0x473, 'V'), + (0x474, 'M', u'ѵ'), + (0x475, 'V'), + (0x476, 'M', u'ѷ'), + (0x477, 'V'), + (0x478, 'M', u'ѹ'), + (0x479, 'V'), + (0x47A, 'M', u'ѻ'), + (0x47B, 'V'), + (0x47C, 'M', u'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', u'ѿ'), + (0x47F, 'V'), + (0x480, 'M', u'ҁ'), + (0x481, 'V'), + (0x48A, 'M', u'ҋ'), + (0x48B, 'V'), + (0x48C, 'M', u'ҍ'), + (0x48D, 'V'), + (0x48E, 'M', u'ҏ'), + (0x48F, 'V'), + (0x490, 'M', u'ґ'), + (0x491, 'V'), + (0x492, 'M', u'ғ'), + (0x493, 'V'), + (0x494, 'M', u'ҕ'), + (0x495, 'V'), + (0x496, 'M', u'җ'), + (0x497, 'V'), + (0x498, 'M', u'ҙ'), + (0x499, 'V'), + (0x49A, 'M', u'қ'), + (0x49B, 'V'), + (0x49C, 'M', u'ҝ'), + (0x49D, 'V'), + ] + +def _seg_8(): + return [ + (0x49E, 'M', u'ҟ'), + (0x49F, 'V'), + (0x4A0, 'M', u'ҡ'), + (0x4A1, 'V'), + (0x4A2, 'M', u'ң'), + (0x4A3, 'V'), + (0x4A4, 'M', 
u'ҥ'), + (0x4A5, 'V'), + (0x4A6, 'M', u'ҧ'), + (0x4A7, 'V'), + (0x4A8, 'M', u'ҩ'), + (0x4A9, 'V'), + (0x4AA, 'M', u'ҫ'), + (0x4AB, 'V'), + (0x4AC, 'M', u'ҭ'), + (0x4AD, 'V'), + (0x4AE, 'M', u'ү'), + (0x4AF, 'V'), + (0x4B0, 'M', u'ұ'), + (0x4B1, 'V'), + (0x4B2, 'M', u'ҳ'), + (0x4B3, 'V'), + (0x4B4, 'M', u'ҵ'), + (0x4B5, 'V'), + (0x4B6, 'M', u'ҷ'), + (0x4B7, 'V'), + (0x4B8, 'M', u'ҹ'), + (0x4B9, 'V'), + (0x4BA, 'M', u'һ'), + (0x4BB, 'V'), + (0x4BC, 'M', u'ҽ'), + (0x4BD, 'V'), + (0x4BE, 'M', u'ҿ'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', u'ӂ'), + (0x4C2, 'V'), + (0x4C3, 'M', u'ӄ'), + (0x4C4, 'V'), + (0x4C5, 'M', u'ӆ'), + (0x4C6, 'V'), + (0x4C7, 'M', u'ӈ'), + (0x4C8, 'V'), + (0x4C9, 'M', u'ӊ'), + (0x4CA, 'V'), + (0x4CB, 'M', u'ӌ'), + (0x4CC, 'V'), + (0x4CD, 'M', u'ӎ'), + (0x4CE, 'V'), + (0x4D0, 'M', u'ӑ'), + (0x4D1, 'V'), + (0x4D2, 'M', u'ӓ'), + (0x4D3, 'V'), + (0x4D4, 'M', u'ӕ'), + (0x4D5, 'V'), + (0x4D6, 'M', u'ӗ'), + (0x4D7, 'V'), + (0x4D8, 'M', u'ә'), + (0x4D9, 'V'), + (0x4DA, 'M', u'ӛ'), + (0x4DB, 'V'), + (0x4DC, 'M', u'ӝ'), + (0x4DD, 'V'), + (0x4DE, 'M', u'ӟ'), + (0x4DF, 'V'), + (0x4E0, 'M', u'ӡ'), + (0x4E1, 'V'), + (0x4E2, 'M', u'ӣ'), + (0x4E3, 'V'), + (0x4E4, 'M', u'ӥ'), + (0x4E5, 'V'), + (0x4E6, 'M', u'ӧ'), + (0x4E7, 'V'), + (0x4E8, 'M', u'ө'), + (0x4E9, 'V'), + (0x4EA, 'M', u'ӫ'), + (0x4EB, 'V'), + (0x4EC, 'M', u'ӭ'), + (0x4ED, 'V'), + (0x4EE, 'M', u'ӯ'), + (0x4EF, 'V'), + (0x4F0, 'M', u'ӱ'), + (0x4F1, 'V'), + (0x4F2, 'M', u'ӳ'), + (0x4F3, 'V'), + (0x4F4, 'M', u'ӵ'), + (0x4F5, 'V'), + (0x4F6, 'M', u'ӷ'), + (0x4F7, 'V'), + (0x4F8, 'M', u'ӹ'), + (0x4F9, 'V'), + (0x4FA, 'M', u'ӻ'), + (0x4FB, 'V'), + (0x4FC, 'M', u'ӽ'), + (0x4FD, 'V'), + (0x4FE, 'M', u'ӿ'), + (0x4FF, 'V'), + (0x500, 'M', u'ԁ'), + (0x501, 'V'), + (0x502, 'M', u'ԃ'), + ] + +def _seg_9(): + return [ + (0x503, 'V'), + (0x504, 'M', u'ԅ'), + (0x505, 'V'), + (0x506, 'M', u'ԇ'), + (0x507, 'V'), + (0x508, 'M', u'ԉ'), + (0x509, 'V'), + (0x50A, 'M', u'ԋ'), + (0x50B, 'V'), + (0x50C, 'M', u'ԍ'), + (0x50D, 'V'), + (0x50E, 'M', u'ԏ'), + (0x50F, 'V'), + (0x510, 'M', u'ԑ'), + (0x511, 'V'), + (0x512, 'M', u'ԓ'), + (0x513, 'V'), + (0x514, 'M', u'ԕ'), + (0x515, 'V'), + (0x516, 'M', u'ԗ'), + (0x517, 'V'), + (0x518, 'M', u'ԙ'), + (0x519, 'V'), + (0x51A, 'M', u'ԛ'), + (0x51B, 'V'), + (0x51C, 'M', u'ԝ'), + (0x51D, 'V'), + (0x51E, 'M', u'ԟ'), + (0x51F, 'V'), + (0x520, 'M', u'ԡ'), + (0x521, 'V'), + (0x522, 'M', u'ԣ'), + (0x523, 'V'), + (0x524, 'M', u'ԥ'), + (0x525, 'V'), + (0x526, 'M', u'ԧ'), + (0x527, 'V'), + (0x528, 'M', u'ԩ'), + (0x529, 'V'), + (0x52A, 'M', u'ԫ'), + (0x52B, 'V'), + (0x52C, 'M', u'ԭ'), + (0x52D, 'V'), + (0x52E, 'M', u'ԯ'), + (0x52F, 'V'), + (0x530, 'X'), + (0x531, 'M', u'ա'), + (0x532, 'M', u'բ'), + (0x533, 'M', u'գ'), + (0x534, 'M', u'դ'), + (0x535, 'M', u'ե'), + (0x536, 'M', u'զ'), + (0x537, 'M', u'է'), + (0x538, 'M', u'ը'), + (0x539, 'M', u'թ'), + (0x53A, 'M', u'ժ'), + (0x53B, 'M', u'ի'), + (0x53C, 'M', u'լ'), + (0x53D, 'M', u'խ'), + (0x53E, 'M', u'ծ'), + (0x53F, 'M', u'կ'), + (0x540, 'M', u'հ'), + (0x541, 'M', u'ձ'), + (0x542, 'M', u'ղ'), + (0x543, 'M', u'ճ'), + (0x544, 'M', u'մ'), + (0x545, 'M', u'յ'), + (0x546, 'M', u'ն'), + (0x547, 'M', u'շ'), + (0x548, 'M', u'ո'), + (0x549, 'M', u'չ'), + (0x54A, 'M', u'պ'), + (0x54B, 'M', u'ջ'), + (0x54C, 'M', u'ռ'), + (0x54D, 'M', u'ս'), + (0x54E, 'M', u'վ'), + (0x54F, 'M', u'տ'), + (0x550, 'M', u'ր'), + (0x551, 'M', u'ց'), + (0x552, 'M', u'ւ'), + (0x553, 'M', u'փ'), + (0x554, 'M', u'ք'), + (0x555, 'M', u'օ'), + (0x556, 'M', u'ֆ'), + (0x557, 'X'), + (0x559, 'V'), + (0x587, 'M', 
u'եւ'), + (0x588, 'V'), + (0x58B, 'X'), + (0x58D, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5EF, 'V'), + (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61E, 'V'), + ] + +def _seg_10(): + return [ + (0x675, 'M', u'اٴ'), + (0x676, 'M', u'وٴ'), + (0x677, 'M', u'ۇٴ'), + (0x678, 'M', u'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x7FD, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x860, 'V'), + (0x86B, 'X'), + (0x8A0, 'V'), + (0x8B5, 'X'), + (0x8B6, 'V'), + (0x8BE, 'X'), + (0x8D3, 'V'), + (0x8E2, 'X'), + (0x8E3, 'V'), + (0x958, 'M', u'क़'), + (0x959, 'M', u'ख़'), + (0x95A, 'M', u'ग़'), + (0x95B, 'M', u'ज़'), + (0x95C, 'M', u'ड़'), + (0x95D, 'M', u'ढ़'), + (0x95E, 'M', u'फ़'), + (0x95F, 'M', u'य़'), + (0x960, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', u'ড়'), + (0x9DD, 'M', u'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', u'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FF, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', u'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', u'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + (0xA59, 'M', u'ਖ਼'), + (0xA5A, 'M', u'ਗ਼'), + (0xA5B, 'M', u'ਜ਼'), + ] + +def _seg_11(): + return [ + (0xA5C, 'V'), + (0xA5D, 'X'), + (0xA5E, 'M', u'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA77, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xAF9, 'V'), + (0xB00, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB56, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', u'ଡ଼'), + (0xB5D, 'M', u'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + 
(0xBFB, 'X'), + (0xC00, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + ] + +def _seg_12(): + return [ + (0xC29, 'X'), + (0xC2A, 'V'), + (0xC3A, 'X'), + (0xC3D, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5B, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC78, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDE, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF3, 'X'), + (0xD00, 'V'), + (0xD04, 'X'), + (0xD05, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD50, 'X'), + (0xD54, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD80, 'X'), + (0xD82, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDE6, 'V'), + (0xDF0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', u'ํา'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE87, 'V'), + (0xE89, 'X'), + (0xE8A, 'V'), + (0xE8B, 'X'), + (0xE8D, 'V'), + (0xE8E, 'X'), + (0xE94, 'V'), + ] + +def _seg_13(): + return [ + (0xE98, 'X'), + (0xE99, 'V'), + (0xEA0, 'X'), + (0xEA1, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEA8, 'X'), + (0xEAA, 'V'), + (0xEAC, 'X'), + (0xEAD, 'V'), + (0xEB3, 'M', u'ໍາ'), + (0xEB4, 'V'), + (0xEBA, 'X'), + (0xEBB, 'V'), + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECE, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', u'ຫນ'), + (0xEDD, 'M', u'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', u'་'), + (0xF0D, 'V'), + (0xF43, 'M', u'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', u'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', u'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', u'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', u'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', u'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', u'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', u'ཱུ'), + (0xF76, 'M', u'ྲྀ'), + (0xF77, 'M', u'ྲཱྀ'), + (0xF78, 'M', u'ླྀ'), + (0xF79, 'M', u'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', u'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', u'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', u'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', u'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', u'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', u'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', u'ྐྵ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', u'ⴧ'), + (0x10C8, 'X'), + (0x10CD, 'M', u'ⴭ'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', u'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + 
(0x128A, 'V'), + ] + +def _seg_14(): + return [ + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F6, 'X'), + (0x13F8, 'M', u'Ᏸ'), + (0x13F9, 'M', u'Ᏹ'), + (0x13FA, 'M', u'Ᏺ'), + (0x13FB, 'M', u'Ᏻ'), + (0x13FC, 'M', u'Ᏼ'), + (0x13FD, 'M', u'Ᏽ'), + (0x13FE, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F9, 'X'), + (0x1700, 'V'), + (0x170D, 'X'), + (0x170E, 'V'), + (0x1715, 'X'), + (0x1720, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1879, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191F, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + ] + +def _seg_15(): + return [ + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1AB0, 'V'), + (0x1ABF, 'X'), + (0x1B00, 'V'), + (0x1B4C, 'X'), + (0x1B50, 'V'), + (0x1B7D, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'M', u'в'), + (0x1C81, 'M', u'д'), + (0x1C82, 'M', u'о'), + (0x1C83, 'M', u'с'), + (0x1C84, 'M', u'т'), + (0x1C86, 'M', u'ъ'), + (0x1C87, 'M', u'ѣ'), + (0x1C88, 'M', u'ꙋ'), + (0x1C89, 'X'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CFA, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', u'a'), + (0x1D2D, 'M', u'æ'), + (0x1D2E, 'M', u'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', u'd'), + (0x1D31, 'M', u'e'), + (0x1D32, 'M', u'ǝ'), + (0x1D33, 'M', u'g'), + (0x1D34, 'M', u'h'), + (0x1D35, 'M', u'i'), + (0x1D36, 'M', u'j'), + (0x1D37, 'M', u'k'), + (0x1D38, 'M', u'l'), + (0x1D39, 'M', u'm'), + (0x1D3A, 'M', u'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', u'o'), + (0x1D3D, 'M', u'ȣ'), + (0x1D3E, 'M', u'p'), + (0x1D3F, 'M', u'r'), + (0x1D40, 'M', u't'), + (0x1D41, 'M', u'u'), + (0x1D42, 'M', u'w'), + (0x1D43, 'M', u'a'), + (0x1D44, 'M', u'ɐ'), + (0x1D45, 'M', u'ɑ'), + (0x1D46, 'M', u'ᴂ'), + (0x1D47, 'M', u'b'), + (0x1D48, 'M', u'd'), + (0x1D49, 'M', u'e'), + (0x1D4A, 'M', u'ə'), + (0x1D4B, 'M', u'ɛ'), + (0x1D4C, 'M', u'ɜ'), + (0x1D4D, 'M', u'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', u'k'), + (0x1D50, 'M', u'm'), + (0x1D51, 'M', u'ŋ'), + (0x1D52, 'M', u'o'), + (0x1D53, 'M', u'ɔ'), + (0x1D54, 'M', u'ᴖ'), + (0x1D55, 'M', u'ᴗ'), + (0x1D56, 'M', u'p'), + (0x1D57, 'M', u't'), + (0x1D58, 'M', u'u'), + (0x1D59, 'M', u'ᴝ'), + (0x1D5A, 'M', u'ɯ'), + (0x1D5B, 'M', u'v'), + (0x1D5C, 'M', u'ᴥ'), + (0x1D5D, 'M', u'β'), + (0x1D5E, 'M', u'γ'), + (0x1D5F, 'M', u'δ'), + 
(0x1D60, 'M', u'φ'), + (0x1D61, 'M', u'χ'), + (0x1D62, 'M', u'i'), + (0x1D63, 'M', u'r'), + (0x1D64, 'M', u'u'), + (0x1D65, 'M', u'v'), + (0x1D66, 'M', u'β'), + (0x1D67, 'M', u'γ'), + (0x1D68, 'M', u'ρ'), + (0x1D69, 'M', u'φ'), + (0x1D6A, 'M', u'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', u'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', u'ɒ'), + (0x1D9C, 'M', u'c'), + (0x1D9D, 'M', u'ɕ'), + (0x1D9E, 'M', u'ð'), + ] + +def _seg_16(): + return [ + (0x1D9F, 'M', u'ɜ'), + (0x1DA0, 'M', u'f'), + (0x1DA1, 'M', u'ɟ'), + (0x1DA2, 'M', u'ɡ'), + (0x1DA3, 'M', u'ɥ'), + (0x1DA4, 'M', u'ɨ'), + (0x1DA5, 'M', u'ɩ'), + (0x1DA6, 'M', u'ɪ'), + (0x1DA7, 'M', u'ᵻ'), + (0x1DA8, 'M', u'ʝ'), + (0x1DA9, 'M', u'ɭ'), + (0x1DAA, 'M', u'ᶅ'), + (0x1DAB, 'M', u'ʟ'), + (0x1DAC, 'M', u'ɱ'), + (0x1DAD, 'M', u'ɰ'), + (0x1DAE, 'M', u'ɲ'), + (0x1DAF, 'M', u'ɳ'), + (0x1DB0, 'M', u'ɴ'), + (0x1DB1, 'M', u'ɵ'), + (0x1DB2, 'M', u'ɸ'), + (0x1DB3, 'M', u'ʂ'), + (0x1DB4, 'M', u'ʃ'), + (0x1DB5, 'M', u'ƫ'), + (0x1DB6, 'M', u'ʉ'), + (0x1DB7, 'M', u'ʊ'), + (0x1DB8, 'M', u'ᴜ'), + (0x1DB9, 'M', u'ʋ'), + (0x1DBA, 'M', u'ʌ'), + (0x1DBB, 'M', u'z'), + (0x1DBC, 'M', u'ʐ'), + (0x1DBD, 'M', u'ʑ'), + (0x1DBE, 'M', u'ʒ'), + (0x1DBF, 'M', u'θ'), + (0x1DC0, 'V'), + (0x1DFA, 'X'), + (0x1DFB, 'V'), + (0x1E00, 'M', u'ḁ'), + (0x1E01, 'V'), + (0x1E02, 'M', u'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', u'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', u'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', u'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', u'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', u'ḍ'), + (0x1E0D, 'V'), + (0x1E0E, 'M', u'ḏ'), + (0x1E0F, 'V'), + (0x1E10, 'M', u'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', u'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', u'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', u'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', u'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', u'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', u'ḝ'), + (0x1E1D, 'V'), + (0x1E1E, 'M', u'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', u'ḡ'), + (0x1E21, 'V'), + (0x1E22, 'M', u'ḣ'), + (0x1E23, 'V'), + (0x1E24, 'M', u'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', u'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', u'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', u'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', u'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', u'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', u'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', u'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', u'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', u'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', u'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', u'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', u'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', u'ḿ'), + (0x1E3F, 'V'), + ] + +def _seg_17(): + return [ + (0x1E40, 'M', u'ṁ'), + (0x1E41, 'V'), + (0x1E42, 'M', u'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', u'ṅ'), + (0x1E45, 'V'), + (0x1E46, 'M', u'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', u'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', u'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', u'ṍ'), + (0x1E4D, 'V'), + (0x1E4E, 'M', u'ṏ'), + (0x1E4F, 'V'), + (0x1E50, 'M', u'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', u'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', u'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', u'ṗ'), + (0x1E57, 'V'), + (0x1E58, 'M', u'ṙ'), + (0x1E59, 'V'), + (0x1E5A, 'M', u'ṛ'), + (0x1E5B, 'V'), + (0x1E5C, 'M', u'ṝ'), + (0x1E5D, 'V'), + (0x1E5E, 'M', u'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', u'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', u'ṣ'), + (0x1E63, 'V'), + (0x1E64, 'M', u'ṥ'), + (0x1E65, 'V'), + (0x1E66, 'M', u'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', u'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', u'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', u'ṭ'), + (0x1E6D, 'V'), + (0x1E6E, 'M', u'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', u'ṱ'), + (0x1E71, 
'V'), + (0x1E72, 'M', u'ṳ'), + (0x1E73, 'V'), + (0x1E74, 'M', u'ṵ'), + (0x1E75, 'V'), + (0x1E76, 'M', u'ṷ'), + (0x1E77, 'V'), + (0x1E78, 'M', u'ṹ'), + (0x1E79, 'V'), + (0x1E7A, 'M', u'ṻ'), + (0x1E7B, 'V'), + (0x1E7C, 'M', u'ṽ'), + (0x1E7D, 'V'), + (0x1E7E, 'M', u'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', u'ẁ'), + (0x1E81, 'V'), + (0x1E82, 'M', u'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', u'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', u'ẇ'), + (0x1E87, 'V'), + (0x1E88, 'M', u'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', u'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', u'ẍ'), + (0x1E8D, 'V'), + (0x1E8E, 'M', u'ẏ'), + (0x1E8F, 'V'), + (0x1E90, 'M', u'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', u'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', u'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', u'aʾ'), + (0x1E9B, 'M', u'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', u'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', u'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', u'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', u'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', u'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', u'ẩ'), + ] + +def _seg_18(): + return [ + (0x1EA9, 'V'), + (0x1EAA, 'M', u'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', u'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', u'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', u'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', u'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', u'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', u'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', u'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', u'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', u'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', u'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', u'ề'), + (0x1EC1, 'V'), + (0x1EC2, 'M', u'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', u'ễ'), + (0x1EC5, 'V'), + (0x1EC6, 'M', u'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', u'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', u'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', u'ọ'), + (0x1ECD, 'V'), + (0x1ECE, 'M', u'ỏ'), + (0x1ECF, 'V'), + (0x1ED0, 'M', u'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', u'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', u'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', u'ỗ'), + (0x1ED7, 'V'), + (0x1ED8, 'M', u'ộ'), + (0x1ED9, 'V'), + (0x1EDA, 'M', u'ớ'), + (0x1EDB, 'V'), + (0x1EDC, 'M', u'ờ'), + (0x1EDD, 'V'), + (0x1EDE, 'M', u'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', u'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', u'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', u'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', u'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', u'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', u'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', u'ử'), + (0x1EED, 'V'), + (0x1EEE, 'M', u'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', u'ự'), + (0x1EF1, 'V'), + (0x1EF2, 'M', u'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', u'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', u'ỷ'), + (0x1EF7, 'V'), + (0x1EF8, 'M', u'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', u'ỻ'), + (0x1EFB, 'V'), + (0x1EFC, 'M', u'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', u'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', u'ἀ'), + (0x1F09, 'M', u'ἁ'), + (0x1F0A, 'M', u'ἂ'), + (0x1F0B, 'M', u'ἃ'), + (0x1F0C, 'M', u'ἄ'), + (0x1F0D, 'M', u'ἅ'), + (0x1F0E, 'M', u'ἆ'), + (0x1F0F, 'M', u'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', u'ἐ'), + (0x1F19, 'M', u'ἑ'), + (0x1F1A, 'M', u'ἒ'), + ] + +def _seg_19(): + return [ + (0x1F1B, 'M', u'ἓ'), + (0x1F1C, 'M', u'ἔ'), + (0x1F1D, 'M', u'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', u'ἠ'), + (0x1F29, 'M', u'ἡ'), + (0x1F2A, 'M', u'ἢ'), + (0x1F2B, 'M', u'ἣ'), + (0x1F2C, 'M', u'ἤ'), + (0x1F2D, 'M', u'ἥ'), + (0x1F2E, 'M', u'ἦ'), + (0x1F2F, 'M', u'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', u'ἰ'), + (0x1F39, 'M', u'ἱ'), + (0x1F3A, 'M', u'ἲ'), + (0x1F3B, 'M', u'ἳ'), + (0x1F3C, 'M', u'ἴ'), + (0x1F3D, 'M', u'ἵ'), + 
(0x1F3E, 'M', u'ἶ'), + (0x1F3F, 'M', u'ἷ'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', u'ὀ'), + (0x1F49, 'M', u'ὁ'), + (0x1F4A, 'M', u'ὂ'), + (0x1F4B, 'M', u'ὃ'), + (0x1F4C, 'M', u'ὄ'), + (0x1F4D, 'M', u'ὅ'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', u'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', u'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', u'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', u'ὗ'), + (0x1F60, 'V'), + (0x1F68, 'M', u'ὠ'), + (0x1F69, 'M', u'ὡ'), + (0x1F6A, 'M', u'ὢ'), + (0x1F6B, 'M', u'ὣ'), + (0x1F6C, 'M', u'ὤ'), + (0x1F6D, 'M', u'ὥ'), + (0x1F6E, 'M', u'ὦ'), + (0x1F6F, 'M', u'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', u'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', u'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', u'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', u'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', u'ό'), + (0x1F7A, 'V'), + (0x1F7B, 'M', u'ύ'), + (0x1F7C, 'V'), + (0x1F7D, 'M', u'ώ'), + (0x1F7E, 'X'), + (0x1F80, 'M', u'ἀι'), + (0x1F81, 'M', u'ἁι'), + (0x1F82, 'M', u'ἂι'), + (0x1F83, 'M', u'ἃι'), + (0x1F84, 'M', u'ἄι'), + (0x1F85, 'M', u'ἅι'), + (0x1F86, 'M', u'ἆι'), + (0x1F87, 'M', u'ἇι'), + (0x1F88, 'M', u'ἀι'), + (0x1F89, 'M', u'ἁι'), + (0x1F8A, 'M', u'ἂι'), + (0x1F8B, 'M', u'ἃι'), + (0x1F8C, 'M', u'ἄι'), + (0x1F8D, 'M', u'ἅι'), + (0x1F8E, 'M', u'ἆι'), + (0x1F8F, 'M', u'ἇι'), + (0x1F90, 'M', u'ἠι'), + (0x1F91, 'M', u'ἡι'), + (0x1F92, 'M', u'ἢι'), + (0x1F93, 'M', u'ἣι'), + (0x1F94, 'M', u'ἤι'), + (0x1F95, 'M', u'ἥι'), + (0x1F96, 'M', u'ἦι'), + (0x1F97, 'M', u'ἧι'), + (0x1F98, 'M', u'ἠι'), + (0x1F99, 'M', u'ἡι'), + (0x1F9A, 'M', u'ἢι'), + (0x1F9B, 'M', u'ἣι'), + (0x1F9C, 'M', u'ἤι'), + (0x1F9D, 'M', u'ἥι'), + (0x1F9E, 'M', u'ἦι'), + (0x1F9F, 'M', u'ἧι'), + (0x1FA0, 'M', u'ὠι'), + (0x1FA1, 'M', u'ὡι'), + (0x1FA2, 'M', u'ὢι'), + (0x1FA3, 'M', u'ὣι'), + ] + +def _seg_20(): + return [ + (0x1FA4, 'M', u'ὤι'), + (0x1FA5, 'M', u'ὥι'), + (0x1FA6, 'M', u'ὦι'), + (0x1FA7, 'M', u'ὧι'), + (0x1FA8, 'M', u'ὠι'), + (0x1FA9, 'M', u'ὡι'), + (0x1FAA, 'M', u'ὢι'), + (0x1FAB, 'M', u'ὣι'), + (0x1FAC, 'M', u'ὤι'), + (0x1FAD, 'M', u'ὥι'), + (0x1FAE, 'M', u'ὦι'), + (0x1FAF, 'M', u'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', u'ὰι'), + (0x1FB3, 'M', u'αι'), + (0x1FB4, 'M', u'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', u'ᾶι'), + (0x1FB8, 'M', u'ᾰ'), + (0x1FB9, 'M', u'ᾱ'), + (0x1FBA, 'M', u'ὰ'), + (0x1FBB, 'M', u'ά'), + (0x1FBC, 'M', u'αι'), + (0x1FBD, '3', u' ̓'), + (0x1FBE, 'M', u'ι'), + (0x1FBF, '3', u' ̓'), + (0x1FC0, '3', u' ͂'), + (0x1FC1, '3', u' ̈͂'), + (0x1FC2, 'M', u'ὴι'), + (0x1FC3, 'M', u'ηι'), + (0x1FC4, 'M', u'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', u'ῆι'), + (0x1FC8, 'M', u'ὲ'), + (0x1FC9, 'M', u'έ'), + (0x1FCA, 'M', u'ὴ'), + (0x1FCB, 'M', u'ή'), + (0x1FCC, 'M', u'ηι'), + (0x1FCD, '3', u' ̓̀'), + (0x1FCE, '3', u' ̓́'), + (0x1FCF, '3', u' ̓͂'), + (0x1FD0, 'V'), + (0x1FD3, 'M', u'ΐ'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', u'ῐ'), + (0x1FD9, 'M', u'ῑ'), + (0x1FDA, 'M', u'ὶ'), + (0x1FDB, 'M', u'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', u' ̔̀'), + (0x1FDE, '3', u' ̔́'), + (0x1FDF, '3', u' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', u'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', u'ῠ'), + (0x1FE9, 'M', u'ῡ'), + (0x1FEA, 'M', u'ὺ'), + (0x1FEB, 'M', u'ύ'), + (0x1FEC, 'M', u'ῥ'), + (0x1FED, '3', u' ̈̀'), + (0x1FEE, '3', u' ̈́'), + (0x1FEF, '3', u'`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', u'ὼι'), + (0x1FF3, 'M', u'ωι'), + (0x1FF4, 'M', u'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + (0x1FF7, 'M', u'ῶι'), + (0x1FF8, 'M', u'ὸ'), + (0x1FF9, 'M', u'ό'), + (0x1FFA, 'M', u'ὼ'), + (0x1FFB, 'M', u'ώ'), + (0x1FFC, 'M', 
u'ωι'), + (0x1FFD, '3', u' ́'), + (0x1FFE, '3', u' ̔'), + (0x1FFF, 'X'), + (0x2000, '3', u' '), + (0x200B, 'I'), + (0x200C, 'D', u''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', u'‐'), + (0x2012, 'V'), + (0x2017, '3', u' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + (0x202F, '3', u' '), + (0x2030, 'V'), + (0x2033, 'M', u'′′'), + (0x2034, 'M', u'′′′'), + (0x2035, 'V'), + (0x2036, 'M', u'‵‵'), + (0x2037, 'M', u'‵‵‵'), + ] + +def _seg_21(): + return [ + (0x2038, 'V'), + (0x203C, '3', u'!!'), + (0x203D, 'V'), + (0x203E, '3', u' ̅'), + (0x203F, 'V'), + (0x2047, '3', u'??'), + (0x2048, '3', u'?!'), + (0x2049, '3', u'!?'), + (0x204A, 'V'), + (0x2057, 'M', u'′′′′'), + (0x2058, 'V'), + (0x205F, '3', u' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', u'0'), + (0x2071, 'M', u'i'), + (0x2072, 'X'), + (0x2074, 'M', u'4'), + (0x2075, 'M', u'5'), + (0x2076, 'M', u'6'), + (0x2077, 'M', u'7'), + (0x2078, 'M', u'8'), + (0x2079, 'M', u'9'), + (0x207A, '3', u'+'), + (0x207B, 'M', u'−'), + (0x207C, '3', u'='), + (0x207D, '3', u'('), + (0x207E, '3', u')'), + (0x207F, 'M', u'n'), + (0x2080, 'M', u'0'), + (0x2081, 'M', u'1'), + (0x2082, 'M', u'2'), + (0x2083, 'M', u'3'), + (0x2084, 'M', u'4'), + (0x2085, 'M', u'5'), + (0x2086, 'M', u'6'), + (0x2087, 'M', u'7'), + (0x2088, 'M', u'8'), + (0x2089, 'M', u'9'), + (0x208A, '3', u'+'), + (0x208B, 'M', u'−'), + (0x208C, '3', u'='), + (0x208D, '3', u'('), + (0x208E, '3', u')'), + (0x208F, 'X'), + (0x2090, 'M', u'a'), + (0x2091, 'M', u'e'), + (0x2092, 'M', u'o'), + (0x2093, 'M', u'x'), + (0x2094, 'M', u'ə'), + (0x2095, 'M', u'h'), + (0x2096, 'M', u'k'), + (0x2097, 'M', u'l'), + (0x2098, 'M', u'm'), + (0x2099, 'M', u'n'), + (0x209A, 'M', u'p'), + (0x209B, 'M', u's'), + (0x209C, 'M', u't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', u'rs'), + (0x20A9, 'V'), + (0x20C0, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', u'a/c'), + (0x2101, '3', u'a/s'), + (0x2102, 'M', u'c'), + (0x2103, 'M', u'°c'), + (0x2104, 'V'), + (0x2105, '3', u'c/o'), + (0x2106, '3', u'c/u'), + (0x2107, 'M', u'ɛ'), + (0x2108, 'V'), + (0x2109, 'M', u'°f'), + (0x210A, 'M', u'g'), + (0x210B, 'M', u'h'), + (0x210F, 'M', u'ħ'), + (0x2110, 'M', u'i'), + (0x2112, 'M', u'l'), + (0x2114, 'V'), + (0x2115, 'M', u'n'), + (0x2116, 'M', u'no'), + (0x2117, 'V'), + (0x2119, 'M', u'p'), + (0x211A, 'M', u'q'), + (0x211B, 'M', u'r'), + (0x211E, 'V'), + (0x2120, 'M', u'sm'), + (0x2121, 'M', u'tel'), + (0x2122, 'M', u'tm'), + (0x2123, 'V'), + (0x2124, 'M', u'z'), + (0x2125, 'V'), + (0x2126, 'M', u'ω'), + (0x2127, 'V'), + (0x2128, 'M', u'z'), + (0x2129, 'V'), + ] + +def _seg_22(): + return [ + (0x212A, 'M', u'k'), + (0x212B, 'M', u'å'), + (0x212C, 'M', u'b'), + (0x212D, 'M', u'c'), + (0x212E, 'V'), + (0x212F, 'M', u'e'), + (0x2131, 'M', u'f'), + (0x2132, 'X'), + (0x2133, 'M', u'm'), + (0x2134, 'M', u'o'), + (0x2135, 'M', u'א'), + (0x2136, 'M', u'ב'), + (0x2137, 'M', u'ג'), + (0x2138, 'M', u'ד'), + (0x2139, 'M', u'i'), + (0x213A, 'V'), + (0x213B, 'M', u'fax'), + (0x213C, 'M', u'π'), + (0x213D, 'M', u'γ'), + (0x213F, 'M', u'π'), + (0x2140, 'M', u'∑'), + (0x2141, 'V'), + (0x2145, 'M', u'd'), + (0x2147, 'M', u'e'), + (0x2148, 'M', u'i'), + (0x2149, 'M', u'j'), + (0x214A, 'V'), + (0x2150, 'M', u'1⁄7'), + (0x2151, 'M', u'1⁄9'), + (0x2152, 'M', u'1⁄10'), + (0x2153, 'M', u'1⁄3'), + (0x2154, 'M', u'2⁄3'), + (0x2155, 'M', u'1⁄5'), + (0x2156, 'M', u'2⁄5'), + (0x2157, 'M', u'3⁄5'), + (0x2158, 'M', u'4⁄5'), + (0x2159, 'M', u'1⁄6'), + (0x215A, 'M', 
u'5⁄6'), + (0x215B, 'M', u'1⁄8'), + (0x215C, 'M', u'3⁄8'), + (0x215D, 'M', u'5⁄8'), + (0x215E, 'M', u'7⁄8'), + (0x215F, 'M', u'1⁄'), + (0x2160, 'M', u'i'), + (0x2161, 'M', u'ii'), + (0x2162, 'M', u'iii'), + (0x2163, 'M', u'iv'), + (0x2164, 'M', u'v'), + (0x2165, 'M', u'vi'), + (0x2166, 'M', u'vii'), + (0x2167, 'M', u'viii'), + (0x2168, 'M', u'ix'), + (0x2169, 'M', u'x'), + (0x216A, 'M', u'xi'), + (0x216B, 'M', u'xii'), + (0x216C, 'M', u'l'), + (0x216D, 'M', u'c'), + (0x216E, 'M', u'd'), + (0x216F, 'M', u'm'), + (0x2170, 'M', u'i'), + (0x2171, 'M', u'ii'), + (0x2172, 'M', u'iii'), + (0x2173, 'M', u'iv'), + (0x2174, 'M', u'v'), + (0x2175, 'M', u'vi'), + (0x2176, 'M', u'vii'), + (0x2177, 'M', u'viii'), + (0x2178, 'M', u'ix'), + (0x2179, 'M', u'x'), + (0x217A, 'M', u'xi'), + (0x217B, 'M', u'xii'), + (0x217C, 'M', u'l'), + (0x217D, 'M', u'c'), + (0x217E, 'M', u'd'), + (0x217F, 'M', u'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', u'0⁄3'), + (0x218A, 'V'), + (0x218C, 'X'), + (0x2190, 'V'), + (0x222C, 'M', u'∫∫'), + (0x222D, 'M', u'∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', u'∮∮'), + (0x2230, 'M', u'∮∮∮'), + (0x2231, 'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', u'〈'), + (0x232A, 'M', u'〉'), + (0x232B, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', u'1'), + (0x2461, 'M', u'2'), + ] + +def _seg_23(): + return [ + (0x2462, 'M', u'3'), + (0x2463, 'M', u'4'), + (0x2464, 'M', u'5'), + (0x2465, 'M', u'6'), + (0x2466, 'M', u'7'), + (0x2467, 'M', u'8'), + (0x2468, 'M', u'9'), + (0x2469, 'M', u'10'), + (0x246A, 'M', u'11'), + (0x246B, 'M', u'12'), + (0x246C, 'M', u'13'), + (0x246D, 'M', u'14'), + (0x246E, 'M', u'15'), + (0x246F, 'M', u'16'), + (0x2470, 'M', u'17'), + (0x2471, 'M', u'18'), + (0x2472, 'M', u'19'), + (0x2473, 'M', u'20'), + (0x2474, '3', u'(1)'), + (0x2475, '3', u'(2)'), + (0x2476, '3', u'(3)'), + (0x2477, '3', u'(4)'), + (0x2478, '3', u'(5)'), + (0x2479, '3', u'(6)'), + (0x247A, '3', u'(7)'), + (0x247B, '3', u'(8)'), + (0x247C, '3', u'(9)'), + (0x247D, '3', u'(10)'), + (0x247E, '3', u'(11)'), + (0x247F, '3', u'(12)'), + (0x2480, '3', u'(13)'), + (0x2481, '3', u'(14)'), + (0x2482, '3', u'(15)'), + (0x2483, '3', u'(16)'), + (0x2484, '3', u'(17)'), + (0x2485, '3', u'(18)'), + (0x2486, '3', u'(19)'), + (0x2487, '3', u'(20)'), + (0x2488, 'X'), + (0x249C, '3', u'(a)'), + (0x249D, '3', u'(b)'), + (0x249E, '3', u'(c)'), + (0x249F, '3', u'(d)'), + (0x24A0, '3', u'(e)'), + (0x24A1, '3', u'(f)'), + (0x24A2, '3', u'(g)'), + (0x24A3, '3', u'(h)'), + (0x24A4, '3', u'(i)'), + (0x24A5, '3', u'(j)'), + (0x24A6, '3', u'(k)'), + (0x24A7, '3', u'(l)'), + (0x24A8, '3', u'(m)'), + (0x24A9, '3', u'(n)'), + (0x24AA, '3', u'(o)'), + (0x24AB, '3', u'(p)'), + (0x24AC, '3', u'(q)'), + (0x24AD, '3', u'(r)'), + (0x24AE, '3', u'(s)'), + (0x24AF, '3', u'(t)'), + (0x24B0, '3', u'(u)'), + (0x24B1, '3', u'(v)'), + (0x24B2, '3', u'(w)'), + (0x24B3, '3', u'(x)'), + (0x24B4, '3', u'(y)'), + (0x24B5, '3', u'(z)'), + (0x24B6, 'M', u'a'), + (0x24B7, 'M', u'b'), + (0x24B8, 'M', u'c'), + (0x24B9, 'M', u'd'), + (0x24BA, 'M', u'e'), + (0x24BB, 'M', u'f'), + (0x24BC, 'M', u'g'), + (0x24BD, 'M', u'h'), + (0x24BE, 'M', u'i'), + (0x24BF, 'M', u'j'), + (0x24C0, 'M', u'k'), + (0x24C1, 'M', u'l'), + (0x24C2, 'M', u'm'), + (0x24C3, 'M', u'n'), + (0x24C4, 'M', u'o'), + (0x24C5, 'M', u'p'), + (0x24C6, 'M', u'q'), + (0x24C7, 'M', u'r'), + (0x24C8, 'M', u's'), + (0x24C9, 'M', u't'), + (0x24CA, 'M', u'u'), + (0x24CB, 'M', u'v'), + (0x24CC, 'M', u'w'), + 
(0x24CD, 'M', u'x'), + (0x24CE, 'M', u'y'), + (0x24CF, 'M', u'z'), + (0x24D0, 'M', u'a'), + (0x24D1, 'M', u'b'), + (0x24D2, 'M', u'c'), + (0x24D3, 'M', u'd'), + (0x24D4, 'M', u'e'), + (0x24D5, 'M', u'f'), + (0x24D6, 'M', u'g'), + (0x24D7, 'M', u'h'), + (0x24D8, 'M', u'i'), + ] + +def _seg_24(): + return [ + (0x24D9, 'M', u'j'), + (0x24DA, 'M', u'k'), + (0x24DB, 'M', u'l'), + (0x24DC, 'M', u'm'), + (0x24DD, 'M', u'n'), + (0x24DE, 'M', u'o'), + (0x24DF, 'M', u'p'), + (0x24E0, 'M', u'q'), + (0x24E1, 'M', u'r'), + (0x24E2, 'M', u's'), + (0x24E3, 'M', u't'), + (0x24E4, 'M', u'u'), + (0x24E5, 'M', u'v'), + (0x24E6, 'M', u'w'), + (0x24E7, 'M', u'x'), + (0x24E8, 'M', u'y'), + (0x24E9, 'M', u'z'), + (0x24EA, 'M', u'0'), + (0x24EB, 'V'), + (0x2A0C, 'M', u'∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', u'::='), + (0x2A75, '3', u'=='), + (0x2A76, '3', u'==='), + (0x2A77, 'V'), + (0x2ADC, 'M', u'⫝̸'), + (0x2ADD, 'V'), + (0x2B74, 'X'), + (0x2B76, 'V'), + (0x2B96, 'X'), + (0x2B98, 'V'), + (0x2BC9, 'X'), + (0x2BCA, 'V'), + (0x2BFF, 'X'), + (0x2C00, 'M', u'ⰰ'), + (0x2C01, 'M', u'ⰱ'), + (0x2C02, 'M', u'ⰲ'), + (0x2C03, 'M', u'ⰳ'), + (0x2C04, 'M', u'ⰴ'), + (0x2C05, 'M', u'ⰵ'), + (0x2C06, 'M', u'ⰶ'), + (0x2C07, 'M', u'ⰷ'), + (0x2C08, 'M', u'ⰸ'), + (0x2C09, 'M', u'ⰹ'), + (0x2C0A, 'M', u'ⰺ'), + (0x2C0B, 'M', u'ⰻ'), + (0x2C0C, 'M', u'ⰼ'), + (0x2C0D, 'M', u'ⰽ'), + (0x2C0E, 'M', u'ⰾ'), + (0x2C0F, 'M', u'ⰿ'), + (0x2C10, 'M', u'ⱀ'), + (0x2C11, 'M', u'ⱁ'), + (0x2C12, 'M', u'ⱂ'), + (0x2C13, 'M', u'ⱃ'), + (0x2C14, 'M', u'ⱄ'), + (0x2C15, 'M', u'ⱅ'), + (0x2C16, 'M', u'ⱆ'), + (0x2C17, 'M', u'ⱇ'), + (0x2C18, 'M', u'ⱈ'), + (0x2C19, 'M', u'ⱉ'), + (0x2C1A, 'M', u'ⱊ'), + (0x2C1B, 'M', u'ⱋ'), + (0x2C1C, 'M', u'ⱌ'), + (0x2C1D, 'M', u'ⱍ'), + (0x2C1E, 'M', u'ⱎ'), + (0x2C1F, 'M', u'ⱏ'), + (0x2C20, 'M', u'ⱐ'), + (0x2C21, 'M', u'ⱑ'), + (0x2C22, 'M', u'ⱒ'), + (0x2C23, 'M', u'ⱓ'), + (0x2C24, 'M', u'ⱔ'), + (0x2C25, 'M', u'ⱕ'), + (0x2C26, 'M', u'ⱖ'), + (0x2C27, 'M', u'ⱗ'), + (0x2C28, 'M', u'ⱘ'), + (0x2C29, 'M', u'ⱙ'), + (0x2C2A, 'M', u'ⱚ'), + (0x2C2B, 'M', u'ⱛ'), + (0x2C2C, 'M', u'ⱜ'), + (0x2C2D, 'M', u'ⱝ'), + (0x2C2E, 'M', u'ⱞ'), + (0x2C2F, 'X'), + (0x2C30, 'V'), + (0x2C5F, 'X'), + (0x2C60, 'M', u'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', u'ɫ'), + (0x2C63, 'M', u'ᵽ'), + (0x2C64, 'M', u'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', u'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', u'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', u'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', u'ɑ'), + (0x2C6E, 'M', u'ɱ'), + (0x2C6F, 'M', u'ɐ'), + (0x2C70, 'M', u'ɒ'), + ] + +def _seg_25(): + return [ + (0x2C71, 'V'), + (0x2C72, 'M', u'ⱳ'), + (0x2C73, 'V'), + (0x2C75, 'M', u'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', u'j'), + (0x2C7D, 'M', u'v'), + (0x2C7E, 'M', u'ȿ'), + (0x2C7F, 'M', u'ɀ'), + (0x2C80, 'M', u'ⲁ'), + (0x2C81, 'V'), + (0x2C82, 'M', u'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', u'ⲅ'), + (0x2C85, 'V'), + (0x2C86, 'M', u'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', u'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', u'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', u'ⲍ'), + (0x2C8D, 'V'), + (0x2C8E, 'M', u'ⲏ'), + (0x2C8F, 'V'), + (0x2C90, 'M', u'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', u'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', u'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', u'ⲗ'), + (0x2C97, 'V'), + (0x2C98, 'M', u'ⲙ'), + (0x2C99, 'V'), + (0x2C9A, 'M', u'ⲛ'), + (0x2C9B, 'V'), + (0x2C9C, 'M', u'ⲝ'), + (0x2C9D, 'V'), + (0x2C9E, 'M', u'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', u'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', u'ⲣ'), + (0x2CA3, 'V'), + (0x2CA4, 'M', u'ⲥ'), + (0x2CA5, 'V'), + (0x2CA6, 'M', u'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', u'ⲩ'), + 
(0x2CA9, 'V'), + (0x2CAA, 'M', u'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', u'ⲭ'), + (0x2CAD, 'V'), + (0x2CAE, 'M', u'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', u'ⲱ'), + (0x2CB1, 'V'), + (0x2CB2, 'M', u'ⲳ'), + (0x2CB3, 'V'), + (0x2CB4, 'M', u'ⲵ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', u'ⲷ'), + (0x2CB7, 'V'), + (0x2CB8, 'M', u'ⲹ'), + (0x2CB9, 'V'), + (0x2CBA, 'M', u'ⲻ'), + (0x2CBB, 'V'), + (0x2CBC, 'M', u'ⲽ'), + (0x2CBD, 'V'), + (0x2CBE, 'M', u'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', u'ⳁ'), + (0x2CC1, 'V'), + (0x2CC2, 'M', u'ⳃ'), + (0x2CC3, 'V'), + (0x2CC4, 'M', u'ⳅ'), + (0x2CC5, 'V'), + (0x2CC6, 'M', u'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', u'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', u'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', u'ⳍ'), + (0x2CCD, 'V'), + (0x2CCE, 'M', u'ⳏ'), + (0x2CCF, 'V'), + (0x2CD0, 'M', u'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', u'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', u'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', u'ⳗ'), + (0x2CD7, 'V'), + (0x2CD8, 'M', u'ⳙ'), + (0x2CD9, 'V'), + (0x2CDA, 'M', u'ⳛ'), + ] + +def _seg_26(): + return [ + (0x2CDB, 'V'), + (0x2CDC, 'M', u'ⳝ'), + (0x2CDD, 'V'), + (0x2CDE, 'M', u'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', u'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', u'ⳣ'), + (0x2CE3, 'V'), + (0x2CEB, 'M', u'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', u'ⳮ'), + (0x2CEE, 'V'), + (0x2CF2, 'M', u'ⳳ'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', u'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E4F, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', u'母'), + (0x2EA0, 'V'), + (0x2EF3, 'M', u'龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', u'一'), + (0x2F01, 'M', u'丨'), + (0x2F02, 'M', u'丶'), + (0x2F03, 'M', u'丿'), + (0x2F04, 'M', u'乙'), + (0x2F05, 'M', u'亅'), + (0x2F06, 'M', u'二'), + (0x2F07, 'M', u'亠'), + (0x2F08, 'M', u'人'), + (0x2F09, 'M', u'儿'), + (0x2F0A, 'M', u'入'), + (0x2F0B, 'M', u'八'), + (0x2F0C, 'M', u'冂'), + (0x2F0D, 'M', u'冖'), + (0x2F0E, 'M', u'冫'), + (0x2F0F, 'M', u'几'), + (0x2F10, 'M', u'凵'), + (0x2F11, 'M', u'刀'), + (0x2F12, 'M', u'力'), + (0x2F13, 'M', u'勹'), + (0x2F14, 'M', u'匕'), + (0x2F15, 'M', u'匚'), + (0x2F16, 'M', u'匸'), + (0x2F17, 'M', u'十'), + (0x2F18, 'M', u'卜'), + (0x2F19, 'M', u'卩'), + (0x2F1A, 'M', u'厂'), + (0x2F1B, 'M', u'厶'), + (0x2F1C, 'M', u'又'), + (0x2F1D, 'M', u'口'), + (0x2F1E, 'M', u'囗'), + (0x2F1F, 'M', u'土'), + (0x2F20, 'M', u'士'), + (0x2F21, 'M', u'夂'), + (0x2F22, 'M', u'夊'), + (0x2F23, 'M', u'夕'), + (0x2F24, 'M', u'大'), + (0x2F25, 'M', u'女'), + (0x2F26, 'M', u'子'), + (0x2F27, 'M', u'宀'), + (0x2F28, 'M', u'寸'), + (0x2F29, 'M', u'小'), + (0x2F2A, 'M', u'尢'), + (0x2F2B, 'M', u'尸'), + (0x2F2C, 'M', u'屮'), + (0x2F2D, 'M', u'山'), + ] + +def _seg_27(): + return [ + (0x2F2E, 'M', u'巛'), + (0x2F2F, 'M', u'工'), + (0x2F30, 'M', u'己'), + (0x2F31, 'M', u'巾'), + (0x2F32, 'M', u'干'), + (0x2F33, 'M', u'幺'), + (0x2F34, 'M', u'广'), + (0x2F35, 'M', u'廴'), + (0x2F36, 'M', u'廾'), + (0x2F37, 'M', u'弋'), + (0x2F38, 'M', u'弓'), + (0x2F39, 'M', u'彐'), + (0x2F3A, 'M', u'彡'), + (0x2F3B, 'M', u'彳'), + (0x2F3C, 'M', u'心'), + (0x2F3D, 'M', u'戈'), + (0x2F3E, 'M', u'戶'), + (0x2F3F, 'M', u'手'), + (0x2F40, 'M', u'支'), + (0x2F41, 'M', 
u'攴'), + (0x2F42, 'M', u'文'), + (0x2F43, 'M', u'斗'), + (0x2F44, 'M', u'斤'), + (0x2F45, 'M', u'方'), + (0x2F46, 'M', u'无'), + (0x2F47, 'M', u'日'), + (0x2F48, 'M', u'曰'), + (0x2F49, 'M', u'月'), + (0x2F4A, 'M', u'木'), + (0x2F4B, 'M', u'欠'), + (0x2F4C, 'M', u'止'), + (0x2F4D, 'M', u'歹'), + (0x2F4E, 'M', u'殳'), + (0x2F4F, 'M', u'毋'), + (0x2F50, 'M', u'比'), + (0x2F51, 'M', u'毛'), + (0x2F52, 'M', u'氏'), + (0x2F53, 'M', u'气'), + (0x2F54, 'M', u'水'), + (0x2F55, 'M', u'火'), + (0x2F56, 'M', u'爪'), + (0x2F57, 'M', u'父'), + (0x2F58, 'M', u'爻'), + (0x2F59, 'M', u'爿'), + (0x2F5A, 'M', u'片'), + (0x2F5B, 'M', u'牙'), + (0x2F5C, 'M', u'牛'), + (0x2F5D, 'M', u'犬'), + (0x2F5E, 'M', u'玄'), + (0x2F5F, 'M', u'玉'), + (0x2F60, 'M', u'瓜'), + (0x2F61, 'M', u'瓦'), + (0x2F62, 'M', u'甘'), + (0x2F63, 'M', u'生'), + (0x2F64, 'M', u'用'), + (0x2F65, 'M', u'田'), + (0x2F66, 'M', u'疋'), + (0x2F67, 'M', u'疒'), + (0x2F68, 'M', u'癶'), + (0x2F69, 'M', u'白'), + (0x2F6A, 'M', u'皮'), + (0x2F6B, 'M', u'皿'), + (0x2F6C, 'M', u'目'), + (0x2F6D, 'M', u'矛'), + (0x2F6E, 'M', u'矢'), + (0x2F6F, 'M', u'石'), + (0x2F70, 'M', u'示'), + (0x2F71, 'M', u'禸'), + (0x2F72, 'M', u'禾'), + (0x2F73, 'M', u'穴'), + (0x2F74, 'M', u'立'), + (0x2F75, 'M', u'竹'), + (0x2F76, 'M', u'米'), + (0x2F77, 'M', u'糸'), + (0x2F78, 'M', u'缶'), + (0x2F79, 'M', u'网'), + (0x2F7A, 'M', u'羊'), + (0x2F7B, 'M', u'羽'), + (0x2F7C, 'M', u'老'), + (0x2F7D, 'M', u'而'), + (0x2F7E, 'M', u'耒'), + (0x2F7F, 'M', u'耳'), + (0x2F80, 'M', u'聿'), + (0x2F81, 'M', u'肉'), + (0x2F82, 'M', u'臣'), + (0x2F83, 'M', u'自'), + (0x2F84, 'M', u'至'), + (0x2F85, 'M', u'臼'), + (0x2F86, 'M', u'舌'), + (0x2F87, 'M', u'舛'), + (0x2F88, 'M', u'舟'), + (0x2F89, 'M', u'艮'), + (0x2F8A, 'M', u'色'), + (0x2F8B, 'M', u'艸'), + (0x2F8C, 'M', u'虍'), + (0x2F8D, 'M', u'虫'), + (0x2F8E, 'M', u'血'), + (0x2F8F, 'M', u'行'), + (0x2F90, 'M', u'衣'), + (0x2F91, 'M', u'襾'), + ] + +def _seg_28(): + return [ + (0x2F92, 'M', u'見'), + (0x2F93, 'M', u'角'), + (0x2F94, 'M', u'言'), + (0x2F95, 'M', u'谷'), + (0x2F96, 'M', u'豆'), + (0x2F97, 'M', u'豕'), + (0x2F98, 'M', u'豸'), + (0x2F99, 'M', u'貝'), + (0x2F9A, 'M', u'赤'), + (0x2F9B, 'M', u'走'), + (0x2F9C, 'M', u'足'), + (0x2F9D, 'M', u'身'), + (0x2F9E, 'M', u'車'), + (0x2F9F, 'M', u'辛'), + (0x2FA0, 'M', u'辰'), + (0x2FA1, 'M', u'辵'), + (0x2FA2, 'M', u'邑'), + (0x2FA3, 'M', u'酉'), + (0x2FA4, 'M', u'釆'), + (0x2FA5, 'M', u'里'), + (0x2FA6, 'M', u'金'), + (0x2FA7, 'M', u'長'), + (0x2FA8, 'M', u'門'), + (0x2FA9, 'M', u'阜'), + (0x2FAA, 'M', u'隶'), + (0x2FAB, 'M', u'隹'), + (0x2FAC, 'M', u'雨'), + (0x2FAD, 'M', u'靑'), + (0x2FAE, 'M', u'非'), + (0x2FAF, 'M', u'面'), + (0x2FB0, 'M', u'革'), + (0x2FB1, 'M', u'韋'), + (0x2FB2, 'M', u'韭'), + (0x2FB3, 'M', u'音'), + (0x2FB4, 'M', u'頁'), + (0x2FB5, 'M', u'風'), + (0x2FB6, 'M', u'飛'), + (0x2FB7, 'M', u'食'), + (0x2FB8, 'M', u'首'), + (0x2FB9, 'M', u'香'), + (0x2FBA, 'M', u'馬'), + (0x2FBB, 'M', u'骨'), + (0x2FBC, 'M', u'高'), + (0x2FBD, 'M', u'髟'), + (0x2FBE, 'M', u'鬥'), + (0x2FBF, 'M', u'鬯'), + (0x2FC0, 'M', u'鬲'), + (0x2FC1, 'M', u'鬼'), + (0x2FC2, 'M', u'魚'), + (0x2FC3, 'M', u'鳥'), + (0x2FC4, 'M', u'鹵'), + (0x2FC5, 'M', u'鹿'), + (0x2FC6, 'M', u'麥'), + (0x2FC7, 'M', u'麻'), + (0x2FC8, 'M', u'黃'), + (0x2FC9, 'M', u'黍'), + (0x2FCA, 'M', u'黑'), + (0x2FCB, 'M', u'黹'), + (0x2FCC, 'M', u'黽'), + (0x2FCD, 'M', u'鼎'), + (0x2FCE, 'M', u'鼓'), + (0x2FCF, 'M', u'鼠'), + (0x2FD0, 'M', u'鼻'), + (0x2FD1, 'M', u'齊'), + (0x2FD2, 'M', u'齒'), + (0x2FD3, 'M', u'龍'), + (0x2FD4, 'M', u'龜'), + (0x2FD5, 'M', u'龠'), + (0x2FD6, 'X'), + (0x3000, '3', u' '), + (0x3001, 'V'), + (0x3002, 'M', u'.'), + (0x3003, 'V'), + (0x3036, 
'M', u'〒'), + (0x3037, 'V'), + (0x3038, 'M', u'十'), + (0x3039, 'M', u'卄'), + (0x303A, 'M', u'卅'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', u' ゙'), + (0x309C, '3', u' ゚'), + (0x309D, 'V'), + (0x309F, 'M', u'より'), + (0x30A0, 'V'), + (0x30FF, 'M', u'コト'), + (0x3100, 'X'), + (0x3105, 'V'), + (0x3130, 'X'), + (0x3131, 'M', u'ᄀ'), + (0x3132, 'M', u'ᄁ'), + (0x3133, 'M', u'ᆪ'), + (0x3134, 'M', u'ᄂ'), + (0x3135, 'M', u'ᆬ'), + (0x3136, 'M', u'ᆭ'), + (0x3137, 'M', u'ᄃ'), + (0x3138, 'M', u'ᄄ'), + ] + +def _seg_29(): + return [ + (0x3139, 'M', u'ᄅ'), + (0x313A, 'M', u'ᆰ'), + (0x313B, 'M', u'ᆱ'), + (0x313C, 'M', u'ᆲ'), + (0x313D, 'M', u'ᆳ'), + (0x313E, 'M', u'ᆴ'), + (0x313F, 'M', u'ᆵ'), + (0x3140, 'M', u'ᄚ'), + (0x3141, 'M', u'ᄆ'), + (0x3142, 'M', u'ᄇ'), + (0x3143, 'M', u'ᄈ'), + (0x3144, 'M', u'ᄡ'), + (0x3145, 'M', u'ᄉ'), + (0x3146, 'M', u'ᄊ'), + (0x3147, 'M', u'ᄋ'), + (0x3148, 'M', u'ᄌ'), + (0x3149, 'M', u'ᄍ'), + (0x314A, 'M', u'ᄎ'), + (0x314B, 'M', u'ᄏ'), + (0x314C, 'M', u'ᄐ'), + (0x314D, 'M', u'ᄑ'), + (0x314E, 'M', u'ᄒ'), + (0x314F, 'M', u'ᅡ'), + (0x3150, 'M', u'ᅢ'), + (0x3151, 'M', u'ᅣ'), + (0x3152, 'M', u'ᅤ'), + (0x3153, 'M', u'ᅥ'), + (0x3154, 'M', u'ᅦ'), + (0x3155, 'M', u'ᅧ'), + (0x3156, 'M', u'ᅨ'), + (0x3157, 'M', u'ᅩ'), + (0x3158, 'M', u'ᅪ'), + (0x3159, 'M', u'ᅫ'), + (0x315A, 'M', u'ᅬ'), + (0x315B, 'M', u'ᅭ'), + (0x315C, 'M', u'ᅮ'), + (0x315D, 'M', u'ᅯ'), + (0x315E, 'M', u'ᅰ'), + (0x315F, 'M', u'ᅱ'), + (0x3160, 'M', u'ᅲ'), + (0x3161, 'M', u'ᅳ'), + (0x3162, 'M', u'ᅴ'), + (0x3163, 'M', u'ᅵ'), + (0x3164, 'X'), + (0x3165, 'M', u'ᄔ'), + (0x3166, 'M', u'ᄕ'), + (0x3167, 'M', u'ᇇ'), + (0x3168, 'M', u'ᇈ'), + (0x3169, 'M', u'ᇌ'), + (0x316A, 'M', u'ᇎ'), + (0x316B, 'M', u'ᇓ'), + (0x316C, 'M', u'ᇗ'), + (0x316D, 'M', u'ᇙ'), + (0x316E, 'M', u'ᄜ'), + (0x316F, 'M', u'ᇝ'), + (0x3170, 'M', u'ᇟ'), + (0x3171, 'M', u'ᄝ'), + (0x3172, 'M', u'ᄞ'), + (0x3173, 'M', u'ᄠ'), + (0x3174, 'M', u'ᄢ'), + (0x3175, 'M', u'ᄣ'), + (0x3176, 'M', u'ᄧ'), + (0x3177, 'M', u'ᄩ'), + (0x3178, 'M', u'ᄫ'), + (0x3179, 'M', u'ᄬ'), + (0x317A, 'M', u'ᄭ'), + (0x317B, 'M', u'ᄮ'), + (0x317C, 'M', u'ᄯ'), + (0x317D, 'M', u'ᄲ'), + (0x317E, 'M', u'ᄶ'), + (0x317F, 'M', u'ᅀ'), + (0x3180, 'M', u'ᅇ'), + (0x3181, 'M', u'ᅌ'), + (0x3182, 'M', u'ᇱ'), + (0x3183, 'M', u'ᇲ'), + (0x3184, 'M', u'ᅗ'), + (0x3185, 'M', u'ᅘ'), + (0x3186, 'M', u'ᅙ'), + (0x3187, 'M', u'ᆄ'), + (0x3188, 'M', u'ᆅ'), + (0x3189, 'M', u'ᆈ'), + (0x318A, 'M', u'ᆑ'), + (0x318B, 'M', u'ᆒ'), + (0x318C, 'M', u'ᆔ'), + (0x318D, 'M', u'ᆞ'), + (0x318E, 'M', u'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', u'一'), + (0x3193, 'M', u'二'), + (0x3194, 'M', u'三'), + (0x3195, 'M', u'四'), + (0x3196, 'M', u'上'), + (0x3197, 'M', u'中'), + (0x3198, 'M', u'下'), + (0x3199, 'M', u'甲'), + (0x319A, 'M', u'乙'), + (0x319B, 'M', u'丙'), + (0x319C, 'M', u'丁'), + (0x319D, 'M', u'天'), + ] + +def _seg_30(): + return [ + (0x319E, 'M', u'地'), + (0x319F, 'M', u'人'), + (0x31A0, 'V'), + (0x31BB, 'X'), + (0x31C0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', u'(ᄀ)'), + (0x3201, '3', u'(ᄂ)'), + (0x3202, '3', u'(ᄃ)'), + (0x3203, '3', u'(ᄅ)'), + (0x3204, '3', u'(ᄆ)'), + (0x3205, '3', u'(ᄇ)'), + (0x3206, '3', u'(ᄉ)'), + (0x3207, '3', u'(ᄋ)'), + (0x3208, '3', u'(ᄌ)'), + (0x3209, '3', u'(ᄎ)'), + (0x320A, '3', u'(ᄏ)'), + (0x320B, '3', u'(ᄐ)'), + (0x320C, '3', u'(ᄑ)'), + (0x320D, '3', u'(ᄒ)'), + (0x320E, '3', u'(가)'), + (0x320F, '3', u'(나)'), + (0x3210, '3', u'(다)'), + (0x3211, '3', u'(라)'), + (0x3212, '3', u'(마)'), + (0x3213, '3', u'(바)'), + (0x3214, '3', u'(사)'), + 
(0x3215, '3', u'(아)'), + (0x3216, '3', u'(자)'), + (0x3217, '3', u'(차)'), + (0x3218, '3', u'(카)'), + (0x3219, '3', u'(타)'), + (0x321A, '3', u'(파)'), + (0x321B, '3', u'(하)'), + (0x321C, '3', u'(주)'), + (0x321D, '3', u'(오전)'), + (0x321E, '3', u'(오후)'), + (0x321F, 'X'), + (0x3220, '3', u'(一)'), + (0x3221, '3', u'(二)'), + (0x3222, '3', u'(三)'), + (0x3223, '3', u'(四)'), + (0x3224, '3', u'(五)'), + (0x3225, '3', u'(六)'), + (0x3226, '3', u'(七)'), + (0x3227, '3', u'(八)'), + (0x3228, '3', u'(九)'), + (0x3229, '3', u'(十)'), + (0x322A, '3', u'(月)'), + (0x322B, '3', u'(火)'), + (0x322C, '3', u'(水)'), + (0x322D, '3', u'(木)'), + (0x322E, '3', u'(金)'), + (0x322F, '3', u'(土)'), + (0x3230, '3', u'(日)'), + (0x3231, '3', u'(株)'), + (0x3232, '3', u'(有)'), + (0x3233, '3', u'(社)'), + (0x3234, '3', u'(名)'), + (0x3235, '3', u'(特)'), + (0x3236, '3', u'(財)'), + (0x3237, '3', u'(祝)'), + (0x3238, '3', u'(労)'), + (0x3239, '3', u'(代)'), + (0x323A, '3', u'(呼)'), + (0x323B, '3', u'(学)'), + (0x323C, '3', u'(監)'), + (0x323D, '3', u'(企)'), + (0x323E, '3', u'(資)'), + (0x323F, '3', u'(協)'), + (0x3240, '3', u'(祭)'), + (0x3241, '3', u'(休)'), + (0x3242, '3', u'(自)'), + (0x3243, '3', u'(至)'), + (0x3244, 'M', u'問'), + (0x3245, 'M', u'幼'), + (0x3246, 'M', u'文'), + (0x3247, 'M', u'箏'), + (0x3248, 'V'), + (0x3250, 'M', u'pte'), + (0x3251, 'M', u'21'), + (0x3252, 'M', u'22'), + (0x3253, 'M', u'23'), + (0x3254, 'M', u'24'), + (0x3255, 'M', u'25'), + (0x3256, 'M', u'26'), + (0x3257, 'M', u'27'), + (0x3258, 'M', u'28'), + (0x3259, 'M', u'29'), + (0x325A, 'M', u'30'), + (0x325B, 'M', u'31'), + (0x325C, 'M', u'32'), + (0x325D, 'M', u'33'), + (0x325E, 'M', u'34'), + (0x325F, 'M', u'35'), + (0x3260, 'M', u'ᄀ'), + (0x3261, 'M', u'ᄂ'), + (0x3262, 'M', u'ᄃ'), + (0x3263, 'M', u'ᄅ'), + ] + +def _seg_31(): + return [ + (0x3264, 'M', u'ᄆ'), + (0x3265, 'M', u'ᄇ'), + (0x3266, 'M', u'ᄉ'), + (0x3267, 'M', u'ᄋ'), + (0x3268, 'M', u'ᄌ'), + (0x3269, 'M', u'ᄎ'), + (0x326A, 'M', u'ᄏ'), + (0x326B, 'M', u'ᄐ'), + (0x326C, 'M', u'ᄑ'), + (0x326D, 'M', u'ᄒ'), + (0x326E, 'M', u'가'), + (0x326F, 'M', u'나'), + (0x3270, 'M', u'다'), + (0x3271, 'M', u'라'), + (0x3272, 'M', u'마'), + (0x3273, 'M', u'바'), + (0x3274, 'M', u'사'), + (0x3275, 'M', u'아'), + (0x3276, 'M', u'자'), + (0x3277, 'M', u'차'), + (0x3278, 'M', u'카'), + (0x3279, 'M', u'타'), + (0x327A, 'M', u'파'), + (0x327B, 'M', u'하'), + (0x327C, 'M', u'참고'), + (0x327D, 'M', u'주의'), + (0x327E, 'M', u'우'), + (0x327F, 'V'), + (0x3280, 'M', u'一'), + (0x3281, 'M', u'二'), + (0x3282, 'M', u'三'), + (0x3283, 'M', u'四'), + (0x3284, 'M', u'五'), + (0x3285, 'M', u'六'), + (0x3286, 'M', u'七'), + (0x3287, 'M', u'八'), + (0x3288, 'M', u'九'), + (0x3289, 'M', u'十'), + (0x328A, 'M', u'月'), + (0x328B, 'M', u'火'), + (0x328C, 'M', u'水'), + (0x328D, 'M', u'木'), + (0x328E, 'M', u'金'), + (0x328F, 'M', u'土'), + (0x3290, 'M', u'日'), + (0x3291, 'M', u'株'), + (0x3292, 'M', u'有'), + (0x3293, 'M', u'社'), + (0x3294, 'M', u'名'), + (0x3295, 'M', u'特'), + (0x3296, 'M', u'財'), + (0x3297, 'M', u'祝'), + (0x3298, 'M', u'労'), + (0x3299, 'M', u'秘'), + (0x329A, 'M', u'男'), + (0x329B, 'M', u'女'), + (0x329C, 'M', u'適'), + (0x329D, 'M', u'優'), + (0x329E, 'M', u'印'), + (0x329F, 'M', u'注'), + (0x32A0, 'M', u'項'), + (0x32A1, 'M', u'休'), + (0x32A2, 'M', u'写'), + (0x32A3, 'M', u'正'), + (0x32A4, 'M', u'上'), + (0x32A5, 'M', u'中'), + (0x32A6, 'M', u'下'), + (0x32A7, 'M', u'左'), + (0x32A8, 'M', u'右'), + (0x32A9, 'M', u'医'), + (0x32AA, 'M', u'宗'), + (0x32AB, 'M', u'学'), + (0x32AC, 'M', u'監'), + (0x32AD, 'M', u'企'), + (0x32AE, 'M', u'資'), + (0x32AF, 'M', u'協'), + (0x32B0, 'M', u'夜'), + 
(0x32B1, 'M', u'36'), + (0x32B2, 'M', u'37'), + (0x32B3, 'M', u'38'), + (0x32B4, 'M', u'39'), + (0x32B5, 'M', u'40'), + (0x32B6, 'M', u'41'), + (0x32B7, 'M', u'42'), + (0x32B8, 'M', u'43'), + (0x32B9, 'M', u'44'), + (0x32BA, 'M', u'45'), + (0x32BB, 'M', u'46'), + (0x32BC, 'M', u'47'), + (0x32BD, 'M', u'48'), + (0x32BE, 'M', u'49'), + (0x32BF, 'M', u'50'), + (0x32C0, 'M', u'1月'), + (0x32C1, 'M', u'2月'), + (0x32C2, 'M', u'3月'), + (0x32C3, 'M', u'4月'), + (0x32C4, 'M', u'5月'), + (0x32C5, 'M', u'6月'), + (0x32C6, 'M', u'7月'), + (0x32C7, 'M', u'8月'), + ] + +def _seg_32(): + return [ + (0x32C8, 'M', u'9月'), + (0x32C9, 'M', u'10月'), + (0x32CA, 'M', u'11月'), + (0x32CB, 'M', u'12月'), + (0x32CC, 'M', u'hg'), + (0x32CD, 'M', u'erg'), + (0x32CE, 'M', u'ev'), + (0x32CF, 'M', u'ltd'), + (0x32D0, 'M', u'ア'), + (0x32D1, 'M', u'イ'), + (0x32D2, 'M', u'ウ'), + (0x32D3, 'M', u'エ'), + (0x32D4, 'M', u'オ'), + (0x32D5, 'M', u'カ'), + (0x32D6, 'M', u'キ'), + (0x32D7, 'M', u'ク'), + (0x32D8, 'M', u'ケ'), + (0x32D9, 'M', u'コ'), + (0x32DA, 'M', u'サ'), + (0x32DB, 'M', u'シ'), + (0x32DC, 'M', u'ス'), + (0x32DD, 'M', u'セ'), + (0x32DE, 'M', u'ソ'), + (0x32DF, 'M', u'タ'), + (0x32E0, 'M', u'チ'), + (0x32E1, 'M', u'ツ'), + (0x32E2, 'M', u'テ'), + (0x32E3, 'M', u'ト'), + (0x32E4, 'M', u'ナ'), + (0x32E5, 'M', u'ニ'), + (0x32E6, 'M', u'ヌ'), + (0x32E7, 'M', u'ネ'), + (0x32E8, 'M', u'ノ'), + (0x32E9, 'M', u'ハ'), + (0x32EA, 'M', u'ヒ'), + (0x32EB, 'M', u'フ'), + (0x32EC, 'M', u'ヘ'), + (0x32ED, 'M', u'ホ'), + (0x32EE, 'M', u'マ'), + (0x32EF, 'M', u'ミ'), + (0x32F0, 'M', u'ム'), + (0x32F1, 'M', u'メ'), + (0x32F2, 'M', u'モ'), + (0x32F3, 'M', u'ヤ'), + (0x32F4, 'M', u'ユ'), + (0x32F5, 'M', u'ヨ'), + (0x32F6, 'M', u'ラ'), + (0x32F7, 'M', u'リ'), + (0x32F8, 'M', u'ル'), + (0x32F9, 'M', u'レ'), + (0x32FA, 'M', u'ロ'), + (0x32FB, 'M', u'ワ'), + (0x32FC, 'M', u'ヰ'), + (0x32FD, 'M', u'ヱ'), + (0x32FE, 'M', u'ヲ'), + (0x32FF, 'X'), + (0x3300, 'M', u'アパート'), + (0x3301, 'M', u'アルファ'), + (0x3302, 'M', u'アンペア'), + (0x3303, 'M', u'アール'), + (0x3304, 'M', u'イニング'), + (0x3305, 'M', u'インチ'), + (0x3306, 'M', u'ウォン'), + (0x3307, 'M', u'エスクード'), + (0x3308, 'M', u'エーカー'), + (0x3309, 'M', u'オンス'), + (0x330A, 'M', u'オーム'), + (0x330B, 'M', u'カイリ'), + (0x330C, 'M', u'カラット'), + (0x330D, 'M', u'カロリー'), + (0x330E, 'M', u'ガロン'), + (0x330F, 'M', u'ガンマ'), + (0x3310, 'M', u'ギガ'), + (0x3311, 'M', u'ギニー'), + (0x3312, 'M', u'キュリー'), + (0x3313, 'M', u'ギルダー'), + (0x3314, 'M', u'キロ'), + (0x3315, 'M', u'キログラム'), + (0x3316, 'M', u'キロメートル'), + (0x3317, 'M', u'キロワット'), + (0x3318, 'M', u'グラム'), + (0x3319, 'M', u'グラムトン'), + (0x331A, 'M', u'クルゼイロ'), + (0x331B, 'M', u'クローネ'), + (0x331C, 'M', u'ケース'), + (0x331D, 'M', u'コルナ'), + (0x331E, 'M', u'コーポ'), + (0x331F, 'M', u'サイクル'), + (0x3320, 'M', u'サンチーム'), + (0x3321, 'M', u'シリング'), + (0x3322, 'M', u'センチ'), + (0x3323, 'M', u'セント'), + (0x3324, 'M', u'ダース'), + (0x3325, 'M', u'デシ'), + (0x3326, 'M', u'ドル'), + (0x3327, 'M', u'トン'), + (0x3328, 'M', u'ナノ'), + (0x3329, 'M', u'ノット'), + (0x332A, 'M', u'ハイツ'), + (0x332B, 'M', u'パーセント'), + ] + +def _seg_33(): + return [ + (0x332C, 'M', u'パーツ'), + (0x332D, 'M', u'バーレル'), + (0x332E, 'M', u'ピアストル'), + (0x332F, 'M', u'ピクル'), + (0x3330, 'M', u'ピコ'), + (0x3331, 'M', u'ビル'), + (0x3332, 'M', u'ファラッド'), + (0x3333, 'M', u'フィート'), + (0x3334, 'M', u'ブッシェル'), + (0x3335, 'M', u'フラン'), + (0x3336, 'M', u'ヘクタール'), + (0x3337, 'M', u'ペソ'), + (0x3338, 'M', u'ペニヒ'), + (0x3339, 'M', u'ヘルツ'), + (0x333A, 'M', u'ペンス'), + (0x333B, 'M', u'ページ'), + (0x333C, 'M', u'ベータ'), + (0x333D, 'M', u'ポイント'), + (0x333E, 'M', u'ボルト'), + (0x333F, 'M', u'ホン'), + (0x3340, 
'M', u'ポンド'), + (0x3341, 'M', u'ホール'), + (0x3342, 'M', u'ホーン'), + (0x3343, 'M', u'マイクロ'), + (0x3344, 'M', u'マイル'), + (0x3345, 'M', u'マッハ'), + (0x3346, 'M', u'マルク'), + (0x3347, 'M', u'マンション'), + (0x3348, 'M', u'ミクロン'), + (0x3349, 'M', u'ミリ'), + (0x334A, 'M', u'ミリバール'), + (0x334B, 'M', u'メガ'), + (0x334C, 'M', u'メガトン'), + (0x334D, 'M', u'メートル'), + (0x334E, 'M', u'ヤード'), + (0x334F, 'M', u'ヤール'), + (0x3350, 'M', u'ユアン'), + (0x3351, 'M', u'リットル'), + (0x3352, 'M', u'リラ'), + (0x3353, 'M', u'ルピー'), + (0x3354, 'M', u'ルーブル'), + (0x3355, 'M', u'レム'), + (0x3356, 'M', u'レントゲン'), + (0x3357, 'M', u'ワット'), + (0x3358, 'M', u'0点'), + (0x3359, 'M', u'1点'), + (0x335A, 'M', u'2点'), + (0x335B, 'M', u'3点'), + (0x335C, 'M', u'4点'), + (0x335D, 'M', u'5点'), + (0x335E, 'M', u'6点'), + (0x335F, 'M', u'7点'), + (0x3360, 'M', u'8点'), + (0x3361, 'M', u'9点'), + (0x3362, 'M', u'10点'), + (0x3363, 'M', u'11点'), + (0x3364, 'M', u'12点'), + (0x3365, 'M', u'13点'), + (0x3366, 'M', u'14点'), + (0x3367, 'M', u'15点'), + (0x3368, 'M', u'16点'), + (0x3369, 'M', u'17点'), + (0x336A, 'M', u'18点'), + (0x336B, 'M', u'19点'), + (0x336C, 'M', u'20点'), + (0x336D, 'M', u'21点'), + (0x336E, 'M', u'22点'), + (0x336F, 'M', u'23点'), + (0x3370, 'M', u'24点'), + (0x3371, 'M', u'hpa'), + (0x3372, 'M', u'da'), + (0x3373, 'M', u'au'), + (0x3374, 'M', u'bar'), + (0x3375, 'M', u'ov'), + (0x3376, 'M', u'pc'), + (0x3377, 'M', u'dm'), + (0x3378, 'M', u'dm2'), + (0x3379, 'M', u'dm3'), + (0x337A, 'M', u'iu'), + (0x337B, 'M', u'平成'), + (0x337C, 'M', u'昭和'), + (0x337D, 'M', u'大正'), + (0x337E, 'M', u'明治'), + (0x337F, 'M', u'株式会社'), + (0x3380, 'M', u'pa'), + (0x3381, 'M', u'na'), + (0x3382, 'M', u'μa'), + (0x3383, 'M', u'ma'), + (0x3384, 'M', u'ka'), + (0x3385, 'M', u'kb'), + (0x3386, 'M', u'mb'), + (0x3387, 'M', u'gb'), + (0x3388, 'M', u'cal'), + (0x3389, 'M', u'kcal'), + (0x338A, 'M', u'pf'), + (0x338B, 'M', u'nf'), + (0x338C, 'M', u'μf'), + (0x338D, 'M', u'μg'), + (0x338E, 'M', u'mg'), + (0x338F, 'M', u'kg'), + ] + +def _seg_34(): + return [ + (0x3390, 'M', u'hz'), + (0x3391, 'M', u'khz'), + (0x3392, 'M', u'mhz'), + (0x3393, 'M', u'ghz'), + (0x3394, 'M', u'thz'), + (0x3395, 'M', u'μl'), + (0x3396, 'M', u'ml'), + (0x3397, 'M', u'dl'), + (0x3398, 'M', u'kl'), + (0x3399, 'M', u'fm'), + (0x339A, 'M', u'nm'), + (0x339B, 'M', u'μm'), + (0x339C, 'M', u'mm'), + (0x339D, 'M', u'cm'), + (0x339E, 'M', u'km'), + (0x339F, 'M', u'mm2'), + (0x33A0, 'M', u'cm2'), + (0x33A1, 'M', u'm2'), + (0x33A2, 'M', u'km2'), + (0x33A3, 'M', u'mm3'), + (0x33A4, 'M', u'cm3'), + (0x33A5, 'M', u'm3'), + (0x33A6, 'M', u'km3'), + (0x33A7, 'M', u'm∕s'), + (0x33A8, 'M', u'm∕s2'), + (0x33A9, 'M', u'pa'), + (0x33AA, 'M', u'kpa'), + (0x33AB, 'M', u'mpa'), + (0x33AC, 'M', u'gpa'), + (0x33AD, 'M', u'rad'), + (0x33AE, 'M', u'rad∕s'), + (0x33AF, 'M', u'rad∕s2'), + (0x33B0, 'M', u'ps'), + (0x33B1, 'M', u'ns'), + (0x33B2, 'M', u'μs'), + (0x33B3, 'M', u'ms'), + (0x33B4, 'M', u'pv'), + (0x33B5, 'M', u'nv'), + (0x33B6, 'M', u'μv'), + (0x33B7, 'M', u'mv'), + (0x33B8, 'M', u'kv'), + (0x33B9, 'M', u'mv'), + (0x33BA, 'M', u'pw'), + (0x33BB, 'M', u'nw'), + (0x33BC, 'M', u'μw'), + (0x33BD, 'M', u'mw'), + (0x33BE, 'M', u'kw'), + (0x33BF, 'M', u'mw'), + (0x33C0, 'M', u'kω'), + (0x33C1, 'M', u'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', u'bq'), + (0x33C4, 'M', u'cc'), + (0x33C5, 'M', u'cd'), + (0x33C6, 'M', u'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', u'db'), + (0x33C9, 'M', u'gy'), + (0x33CA, 'M', u'ha'), + (0x33CB, 'M', u'hp'), + (0x33CC, 'M', u'in'), + (0x33CD, 'M', u'kk'), + (0x33CE, 'M', u'km'), + (0x33CF, 'M', u'kt'), + 
(0x33D0, 'M', u'lm'), + (0x33D1, 'M', u'ln'), + (0x33D2, 'M', u'log'), + (0x33D3, 'M', u'lx'), + (0x33D4, 'M', u'mb'), + (0x33D5, 'M', u'mil'), + (0x33D6, 'M', u'mol'), + (0x33D7, 'M', u'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', u'ppm'), + (0x33DA, 'M', u'pr'), + (0x33DB, 'M', u'sr'), + (0x33DC, 'M', u'sv'), + (0x33DD, 'M', u'wb'), + (0x33DE, 'M', u'v∕m'), + (0x33DF, 'M', u'a∕m'), + (0x33E0, 'M', u'1日'), + (0x33E1, 'M', u'2日'), + (0x33E2, 'M', u'3日'), + (0x33E3, 'M', u'4日'), + (0x33E4, 'M', u'5日'), + (0x33E5, 'M', u'6日'), + (0x33E6, 'M', u'7日'), + (0x33E7, 'M', u'8日'), + (0x33E8, 'M', u'9日'), + (0x33E9, 'M', u'10日'), + (0x33EA, 'M', u'11日'), + (0x33EB, 'M', u'12日'), + (0x33EC, 'M', u'13日'), + (0x33ED, 'M', u'14日'), + (0x33EE, 'M', u'15日'), + (0x33EF, 'M', u'16日'), + (0x33F0, 'M', u'17日'), + (0x33F1, 'M', u'18日'), + (0x33F2, 'M', u'19日'), + (0x33F3, 'M', u'20日'), + ] + +def _seg_35(): + return [ + (0x33F4, 'M', u'21日'), + (0x33F5, 'M', u'22日'), + (0x33F6, 'M', u'23日'), + (0x33F7, 'M', u'24日'), + (0x33F8, 'M', u'25日'), + (0x33F9, 'M', u'26日'), + (0x33FA, 'M', u'27日'), + (0x33FB, 'M', u'28日'), + (0x33FC, 'M', u'29日'), + (0x33FD, 'M', u'30日'), + (0x33FE, 'M', u'31日'), + (0x33FF, 'M', u'gal'), + (0x3400, 'V'), + (0x4DB6, 'X'), + (0x4DC0, 'V'), + (0x9FF0, 'X'), + (0xA000, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', u'ꙁ'), + (0xA641, 'V'), + (0xA642, 'M', u'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', u'ꙅ'), + (0xA645, 'V'), + (0xA646, 'M', u'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', u'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', u'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', u'ꙍ'), + (0xA64D, 'V'), + (0xA64E, 'M', u'ꙏ'), + (0xA64F, 'V'), + (0xA650, 'M', u'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', u'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', u'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', u'ꙗ'), + (0xA657, 'V'), + (0xA658, 'M', u'ꙙ'), + (0xA659, 'V'), + (0xA65A, 'M', u'ꙛ'), + (0xA65B, 'V'), + (0xA65C, 'M', u'ꙝ'), + (0xA65D, 'V'), + (0xA65E, 'M', u'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', u'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', u'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', u'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', u'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', u'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', u'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', u'ꙭ'), + (0xA66D, 'V'), + (0xA680, 'M', u'ꚁ'), + (0xA681, 'V'), + (0xA682, 'M', u'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', u'ꚅ'), + (0xA685, 'V'), + (0xA686, 'M', u'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', u'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', u'ꚋ'), + (0xA68B, 'V'), + (0xA68C, 'M', u'ꚍ'), + (0xA68D, 'V'), + (0xA68E, 'M', u'ꚏ'), + (0xA68F, 'V'), + (0xA690, 'M', u'ꚑ'), + (0xA691, 'V'), + (0xA692, 'M', u'ꚓ'), + (0xA693, 'V'), + (0xA694, 'M', u'ꚕ'), + (0xA695, 'V'), + (0xA696, 'M', u'ꚗ'), + (0xA697, 'V'), + (0xA698, 'M', u'ꚙ'), + (0xA699, 'V'), + (0xA69A, 'M', u'ꚛ'), + (0xA69B, 'V'), + (0xA69C, 'M', u'ъ'), + (0xA69D, 'M', u'ь'), + (0xA69E, 'V'), + (0xA6F8, 'X'), + ] + +def _seg_36(): + return [ + (0xA700, 'V'), + (0xA722, 'M', u'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', u'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', u'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', u'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', u'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', u'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', u'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', u'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', u'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', u'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', u'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', u'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', u'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', u'ꜿ'), + (0xA73F, 'V'), + 
(0xA740, 'M', u'ꝁ'), + (0xA741, 'V'), + (0xA742, 'M', u'ꝃ'), + (0xA743, 'V'), + (0xA744, 'M', u'ꝅ'), + (0xA745, 'V'), + (0xA746, 'M', u'ꝇ'), + (0xA747, 'V'), + (0xA748, 'M', u'ꝉ'), + (0xA749, 'V'), + (0xA74A, 'M', u'ꝋ'), + (0xA74B, 'V'), + (0xA74C, 'M', u'ꝍ'), + (0xA74D, 'V'), + (0xA74E, 'M', u'ꝏ'), + (0xA74F, 'V'), + (0xA750, 'M', u'ꝑ'), + (0xA751, 'V'), + (0xA752, 'M', u'ꝓ'), + (0xA753, 'V'), + (0xA754, 'M', u'ꝕ'), + (0xA755, 'V'), + (0xA756, 'M', u'ꝗ'), + (0xA757, 'V'), + (0xA758, 'M', u'ꝙ'), + (0xA759, 'V'), + (0xA75A, 'M', u'ꝛ'), + (0xA75B, 'V'), + (0xA75C, 'M', u'ꝝ'), + (0xA75D, 'V'), + (0xA75E, 'M', u'ꝟ'), + (0xA75F, 'V'), + (0xA760, 'M', u'ꝡ'), + (0xA761, 'V'), + (0xA762, 'M', u'ꝣ'), + (0xA763, 'V'), + (0xA764, 'M', u'ꝥ'), + (0xA765, 'V'), + (0xA766, 'M', u'ꝧ'), + (0xA767, 'V'), + (0xA768, 'M', u'ꝩ'), + (0xA769, 'V'), + (0xA76A, 'M', u'ꝫ'), + (0xA76B, 'V'), + (0xA76C, 'M', u'ꝭ'), + (0xA76D, 'V'), + (0xA76E, 'M', u'ꝯ'), + (0xA76F, 'V'), + (0xA770, 'M', u'ꝯ'), + (0xA771, 'V'), + (0xA779, 'M', u'ꝺ'), + (0xA77A, 'V'), + (0xA77B, 'M', u'ꝼ'), + (0xA77C, 'V'), + (0xA77D, 'M', u'ᵹ'), + (0xA77E, 'M', u'ꝿ'), + (0xA77F, 'V'), + (0xA780, 'M', u'ꞁ'), + (0xA781, 'V'), + (0xA782, 'M', u'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', u'ꞅ'), + (0xA785, 'V'), + (0xA786, 'M', u'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', u'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', u'ɥ'), + (0xA78E, 'V'), + (0xA790, 'M', u'ꞑ'), + (0xA791, 'V'), + ] + +def _seg_37(): + return [ + (0xA792, 'M', u'ꞓ'), + (0xA793, 'V'), + (0xA796, 'M', u'ꞗ'), + (0xA797, 'V'), + (0xA798, 'M', u'ꞙ'), + (0xA799, 'V'), + (0xA79A, 'M', u'ꞛ'), + (0xA79B, 'V'), + (0xA79C, 'M', u'ꞝ'), + (0xA79D, 'V'), + (0xA79E, 'M', u'ꞟ'), + (0xA79F, 'V'), + (0xA7A0, 'M', u'ꞡ'), + (0xA7A1, 'V'), + (0xA7A2, 'M', u'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', u'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', u'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', u'ꞩ'), + (0xA7A9, 'V'), + (0xA7AA, 'M', u'ɦ'), + (0xA7AB, 'M', u'ɜ'), + (0xA7AC, 'M', u'ɡ'), + (0xA7AD, 'M', u'ɬ'), + (0xA7AE, 'M', u'ɪ'), + (0xA7AF, 'V'), + (0xA7B0, 'M', u'ʞ'), + (0xA7B1, 'M', u'ʇ'), + (0xA7B2, 'M', u'ʝ'), + (0xA7B3, 'M', u'ꭓ'), + (0xA7B4, 'M', u'ꞵ'), + (0xA7B5, 'V'), + (0xA7B6, 'M', u'ꞷ'), + (0xA7B7, 'V'), + (0xA7B8, 'X'), + (0xA7B9, 'V'), + (0xA7BA, 'X'), + (0xA7F7, 'V'), + (0xA7F8, 'M', u'ħ'), + (0xA7F9, 'M', u'œ'), + (0xA7FA, 'V'), + (0xA82C, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C6, 'X'), + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9FF, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xAB30, 'V'), + (0xAB5C, 'M', u'ꜧ'), + (0xAB5D, 'M', u'ꬷ'), + (0xAB5E, 'M', u'ɫ'), + (0xAB5F, 'M', u'ꭒ'), + (0xAB60, 'V'), + (0xAB66, 'X'), + (0xAB70, 'M', u'Ꭰ'), + (0xAB71, 'M', u'Ꭱ'), + (0xAB72, 'M', u'Ꭲ'), + (0xAB73, 'M', u'Ꭳ'), + (0xAB74, 'M', u'Ꭴ'), + (0xAB75, 'M', u'Ꭵ'), + (0xAB76, 'M', u'Ꭶ'), + (0xAB77, 'M', u'Ꭷ'), + (0xAB78, 'M', u'Ꭸ'), + (0xAB79, 'M', u'Ꭹ'), + (0xAB7A, 'M', u'Ꭺ'), + ] + +def _seg_38(): + return [ + (0xAB7B, 'M', u'Ꭻ'), + (0xAB7C, 'M', u'Ꭼ'), + (0xAB7D, 'M', u'Ꭽ'), + (0xAB7E, 'M', u'Ꭾ'), + (0xAB7F, 'M', u'Ꭿ'), + (0xAB80, 'M', 
u'Ꮀ'), + (0xAB81, 'M', u'Ꮁ'), + (0xAB82, 'M', u'Ꮂ'), + (0xAB83, 'M', u'Ꮃ'), + (0xAB84, 'M', u'Ꮄ'), + (0xAB85, 'M', u'Ꮅ'), + (0xAB86, 'M', u'Ꮆ'), + (0xAB87, 'M', u'Ꮇ'), + (0xAB88, 'M', u'Ꮈ'), + (0xAB89, 'M', u'Ꮉ'), + (0xAB8A, 'M', u'Ꮊ'), + (0xAB8B, 'M', u'Ꮋ'), + (0xAB8C, 'M', u'Ꮌ'), + (0xAB8D, 'M', u'Ꮍ'), + (0xAB8E, 'M', u'Ꮎ'), + (0xAB8F, 'M', u'Ꮏ'), + (0xAB90, 'M', u'Ꮐ'), + (0xAB91, 'M', u'Ꮑ'), + (0xAB92, 'M', u'Ꮒ'), + (0xAB93, 'M', u'Ꮓ'), + (0xAB94, 'M', u'Ꮔ'), + (0xAB95, 'M', u'Ꮕ'), + (0xAB96, 'M', u'Ꮖ'), + (0xAB97, 'M', u'Ꮗ'), + (0xAB98, 'M', u'Ꮘ'), + (0xAB99, 'M', u'Ꮙ'), + (0xAB9A, 'M', u'Ꮚ'), + (0xAB9B, 'M', u'Ꮛ'), + (0xAB9C, 'M', u'Ꮜ'), + (0xAB9D, 'M', u'Ꮝ'), + (0xAB9E, 'M', u'Ꮞ'), + (0xAB9F, 'M', u'Ꮟ'), + (0xABA0, 'M', u'Ꮠ'), + (0xABA1, 'M', u'Ꮡ'), + (0xABA2, 'M', u'Ꮢ'), + (0xABA3, 'M', u'Ꮣ'), + (0xABA4, 'M', u'Ꮤ'), + (0xABA5, 'M', u'Ꮥ'), + (0xABA6, 'M', u'Ꮦ'), + (0xABA7, 'M', u'Ꮧ'), + (0xABA8, 'M', u'Ꮨ'), + (0xABA9, 'M', u'Ꮩ'), + (0xABAA, 'M', u'Ꮪ'), + (0xABAB, 'M', u'Ꮫ'), + (0xABAC, 'M', u'Ꮬ'), + (0xABAD, 'M', u'Ꮭ'), + (0xABAE, 'M', u'Ꮮ'), + (0xABAF, 'M', u'Ꮯ'), + (0xABB0, 'M', u'Ꮰ'), + (0xABB1, 'M', u'Ꮱ'), + (0xABB2, 'M', u'Ꮲ'), + (0xABB3, 'M', u'Ꮳ'), + (0xABB4, 'M', u'Ꮴ'), + (0xABB5, 'M', u'Ꮵ'), + (0xABB6, 'M', u'Ꮶ'), + (0xABB7, 'M', u'Ꮷ'), + (0xABB8, 'M', u'Ꮸ'), + (0xABB9, 'M', u'Ꮹ'), + (0xABBA, 'M', u'Ꮺ'), + (0xABBB, 'M', u'Ꮻ'), + (0xABBC, 'M', u'Ꮼ'), + (0xABBD, 'M', u'Ꮽ'), + (0xABBE, 'M', u'Ꮾ'), + (0xABBF, 'M', u'Ꮿ'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', u'豈'), + (0xF901, 'M', u'更'), + (0xF902, 'M', u'車'), + (0xF903, 'M', u'賈'), + (0xF904, 'M', u'滑'), + (0xF905, 'M', u'串'), + (0xF906, 'M', u'句'), + (0xF907, 'M', u'龜'), + (0xF909, 'M', u'契'), + (0xF90A, 'M', u'金'), + (0xF90B, 'M', u'喇'), + (0xF90C, 'M', u'奈'), + (0xF90D, 'M', u'懶'), + (0xF90E, 'M', u'癩'), + (0xF90F, 'M', u'羅'), + (0xF910, 'M', u'蘿'), + (0xF911, 'M', u'螺'), + (0xF912, 'M', u'裸'), + (0xF913, 'M', u'邏'), + (0xF914, 'M', u'樂'), + (0xF915, 'M', u'洛'), + ] + +def _seg_39(): + return [ + (0xF916, 'M', u'烙'), + (0xF917, 'M', u'珞'), + (0xF918, 'M', u'落'), + (0xF919, 'M', u'酪'), + (0xF91A, 'M', u'駱'), + (0xF91B, 'M', u'亂'), + (0xF91C, 'M', u'卵'), + (0xF91D, 'M', u'欄'), + (0xF91E, 'M', u'爛'), + (0xF91F, 'M', u'蘭'), + (0xF920, 'M', u'鸞'), + (0xF921, 'M', u'嵐'), + (0xF922, 'M', u'濫'), + (0xF923, 'M', u'藍'), + (0xF924, 'M', u'襤'), + (0xF925, 'M', u'拉'), + (0xF926, 'M', u'臘'), + (0xF927, 'M', u'蠟'), + (0xF928, 'M', u'廊'), + (0xF929, 'M', u'朗'), + (0xF92A, 'M', u'浪'), + (0xF92B, 'M', u'狼'), + (0xF92C, 'M', u'郎'), + (0xF92D, 'M', u'來'), + (0xF92E, 'M', u'冷'), + (0xF92F, 'M', u'勞'), + (0xF930, 'M', u'擄'), + (0xF931, 'M', u'櫓'), + (0xF932, 'M', u'爐'), + (0xF933, 'M', u'盧'), + (0xF934, 'M', u'老'), + (0xF935, 'M', u'蘆'), + (0xF936, 'M', u'虜'), + (0xF937, 'M', u'路'), + (0xF938, 'M', u'露'), + (0xF939, 'M', u'魯'), + (0xF93A, 'M', u'鷺'), + (0xF93B, 'M', u'碌'), + (0xF93C, 'M', u'祿'), + (0xF93D, 'M', u'綠'), + (0xF93E, 'M', u'菉'), + (0xF93F, 'M', u'錄'), + (0xF940, 'M', u'鹿'), + (0xF941, 'M', u'論'), + (0xF942, 'M', u'壟'), + (0xF943, 'M', u'弄'), + (0xF944, 'M', u'籠'), + (0xF945, 'M', u'聾'), + (0xF946, 'M', u'牢'), + (0xF947, 'M', u'磊'), + (0xF948, 'M', u'賂'), + (0xF949, 'M', u'雷'), + (0xF94A, 'M', u'壘'), + (0xF94B, 'M', u'屢'), + (0xF94C, 'M', u'樓'), + (0xF94D, 'M', u'淚'), + (0xF94E, 'M', u'漏'), + (0xF94F, 'M', u'累'), + (0xF950, 'M', u'縷'), + (0xF951, 'M', u'陋'), + (0xF952, 'M', u'勒'), + 
(0xF953, 'M', u'肋'), + (0xF954, 'M', u'凜'), + (0xF955, 'M', u'凌'), + (0xF956, 'M', u'稜'), + (0xF957, 'M', u'綾'), + (0xF958, 'M', u'菱'), + (0xF959, 'M', u'陵'), + (0xF95A, 'M', u'讀'), + (0xF95B, 'M', u'拏'), + (0xF95C, 'M', u'樂'), + (0xF95D, 'M', u'諾'), + (0xF95E, 'M', u'丹'), + (0xF95F, 'M', u'寧'), + (0xF960, 'M', u'怒'), + (0xF961, 'M', u'率'), + (0xF962, 'M', u'異'), + (0xF963, 'M', u'北'), + (0xF964, 'M', u'磻'), + (0xF965, 'M', u'便'), + (0xF966, 'M', u'復'), + (0xF967, 'M', u'不'), + (0xF968, 'M', u'泌'), + (0xF969, 'M', u'數'), + (0xF96A, 'M', u'索'), + (0xF96B, 'M', u'參'), + (0xF96C, 'M', u'塞'), + (0xF96D, 'M', u'省'), + (0xF96E, 'M', u'葉'), + (0xF96F, 'M', u'說'), + (0xF970, 'M', u'殺'), + (0xF971, 'M', u'辰'), + (0xF972, 'M', u'沈'), + (0xF973, 'M', u'拾'), + (0xF974, 'M', u'若'), + (0xF975, 'M', u'掠'), + (0xF976, 'M', u'略'), + (0xF977, 'M', u'亮'), + (0xF978, 'M', u'兩'), + (0xF979, 'M', u'凉'), + ] + +def _seg_40(): + return [ + (0xF97A, 'M', u'梁'), + (0xF97B, 'M', u'糧'), + (0xF97C, 'M', u'良'), + (0xF97D, 'M', u'諒'), + (0xF97E, 'M', u'量'), + (0xF97F, 'M', u'勵'), + (0xF980, 'M', u'呂'), + (0xF981, 'M', u'女'), + (0xF982, 'M', u'廬'), + (0xF983, 'M', u'旅'), + (0xF984, 'M', u'濾'), + (0xF985, 'M', u'礪'), + (0xF986, 'M', u'閭'), + (0xF987, 'M', u'驪'), + (0xF988, 'M', u'麗'), + (0xF989, 'M', u'黎'), + (0xF98A, 'M', u'力'), + (0xF98B, 'M', u'曆'), + (0xF98C, 'M', u'歷'), + (0xF98D, 'M', u'轢'), + (0xF98E, 'M', u'年'), + (0xF98F, 'M', u'憐'), + (0xF990, 'M', u'戀'), + (0xF991, 'M', u'撚'), + (0xF992, 'M', u'漣'), + (0xF993, 'M', u'煉'), + (0xF994, 'M', u'璉'), + (0xF995, 'M', u'秊'), + (0xF996, 'M', u'練'), + (0xF997, 'M', u'聯'), + (0xF998, 'M', u'輦'), + (0xF999, 'M', u'蓮'), + (0xF99A, 'M', u'連'), + (0xF99B, 'M', u'鍊'), + (0xF99C, 'M', u'列'), + (0xF99D, 'M', u'劣'), + (0xF99E, 'M', u'咽'), + (0xF99F, 'M', u'烈'), + (0xF9A0, 'M', u'裂'), + (0xF9A1, 'M', u'說'), + (0xF9A2, 'M', u'廉'), + (0xF9A3, 'M', u'念'), + (0xF9A4, 'M', u'捻'), + (0xF9A5, 'M', u'殮'), + (0xF9A6, 'M', u'簾'), + (0xF9A7, 'M', u'獵'), + (0xF9A8, 'M', u'令'), + (0xF9A9, 'M', u'囹'), + (0xF9AA, 'M', u'寧'), + (0xF9AB, 'M', u'嶺'), + (0xF9AC, 'M', u'怜'), + (0xF9AD, 'M', u'玲'), + (0xF9AE, 'M', u'瑩'), + (0xF9AF, 'M', u'羚'), + (0xF9B0, 'M', u'聆'), + (0xF9B1, 'M', u'鈴'), + (0xF9B2, 'M', u'零'), + (0xF9B3, 'M', u'靈'), + (0xF9B4, 'M', u'領'), + (0xF9B5, 'M', u'例'), + (0xF9B6, 'M', u'禮'), + (0xF9B7, 'M', u'醴'), + (0xF9B8, 'M', u'隸'), + (0xF9B9, 'M', u'惡'), + (0xF9BA, 'M', u'了'), + (0xF9BB, 'M', u'僚'), + (0xF9BC, 'M', u'寮'), + (0xF9BD, 'M', u'尿'), + (0xF9BE, 'M', u'料'), + (0xF9BF, 'M', u'樂'), + (0xF9C0, 'M', u'燎'), + (0xF9C1, 'M', u'療'), + (0xF9C2, 'M', u'蓼'), + (0xF9C3, 'M', u'遼'), + (0xF9C4, 'M', u'龍'), + (0xF9C5, 'M', u'暈'), + (0xF9C6, 'M', u'阮'), + (0xF9C7, 'M', u'劉'), + (0xF9C8, 'M', u'杻'), + (0xF9C9, 'M', u'柳'), + (0xF9CA, 'M', u'流'), + (0xF9CB, 'M', u'溜'), + (0xF9CC, 'M', u'琉'), + (0xF9CD, 'M', u'留'), + (0xF9CE, 'M', u'硫'), + (0xF9CF, 'M', u'紐'), + (0xF9D0, 'M', u'類'), + (0xF9D1, 'M', u'六'), + (0xF9D2, 'M', u'戮'), + (0xF9D3, 'M', u'陸'), + (0xF9D4, 'M', u'倫'), + (0xF9D5, 'M', u'崙'), + (0xF9D6, 'M', u'淪'), + (0xF9D7, 'M', u'輪'), + (0xF9D8, 'M', u'律'), + (0xF9D9, 'M', u'慄'), + (0xF9DA, 'M', u'栗'), + (0xF9DB, 'M', u'率'), + (0xF9DC, 'M', u'隆'), + (0xF9DD, 'M', u'利'), + ] + +def _seg_41(): + return [ + (0xF9DE, 'M', u'吏'), + (0xF9DF, 'M', u'履'), + (0xF9E0, 'M', u'易'), + (0xF9E1, 'M', u'李'), + (0xF9E2, 'M', u'梨'), + (0xF9E3, 'M', u'泥'), + (0xF9E4, 'M', u'理'), + (0xF9E5, 'M', u'痢'), + (0xF9E6, 'M', u'罹'), + (0xF9E7, 'M', u'裏'), + (0xF9E8, 'M', u'裡'), + (0xF9E9, 'M', u'里'), + (0xF9EA, 'M', 
u'離'), + (0xF9EB, 'M', u'匿'), + (0xF9EC, 'M', u'溺'), + (0xF9ED, 'M', u'吝'), + (0xF9EE, 'M', u'燐'), + (0xF9EF, 'M', u'璘'), + (0xF9F0, 'M', u'藺'), + (0xF9F1, 'M', u'隣'), + (0xF9F2, 'M', u'鱗'), + (0xF9F3, 'M', u'麟'), + (0xF9F4, 'M', u'林'), + (0xF9F5, 'M', u'淋'), + (0xF9F6, 'M', u'臨'), + (0xF9F7, 'M', u'立'), + (0xF9F8, 'M', u'笠'), + (0xF9F9, 'M', u'粒'), + (0xF9FA, 'M', u'狀'), + (0xF9FB, 'M', u'炙'), + (0xF9FC, 'M', u'識'), + (0xF9FD, 'M', u'什'), + (0xF9FE, 'M', u'茶'), + (0xF9FF, 'M', u'刺'), + (0xFA00, 'M', u'切'), + (0xFA01, 'M', u'度'), + (0xFA02, 'M', u'拓'), + (0xFA03, 'M', u'糖'), + (0xFA04, 'M', u'宅'), + (0xFA05, 'M', u'洞'), + (0xFA06, 'M', u'暴'), + (0xFA07, 'M', u'輻'), + (0xFA08, 'M', u'行'), + (0xFA09, 'M', u'降'), + (0xFA0A, 'M', u'見'), + (0xFA0B, 'M', u'廓'), + (0xFA0C, 'M', u'兀'), + (0xFA0D, 'M', u'嗀'), + (0xFA0E, 'V'), + (0xFA10, 'M', u'塚'), + (0xFA11, 'V'), + (0xFA12, 'M', u'晴'), + (0xFA13, 'V'), + (0xFA15, 'M', u'凞'), + (0xFA16, 'M', u'猪'), + (0xFA17, 'M', u'益'), + (0xFA18, 'M', u'礼'), + (0xFA19, 'M', u'神'), + (0xFA1A, 'M', u'祥'), + (0xFA1B, 'M', u'福'), + (0xFA1C, 'M', u'靖'), + (0xFA1D, 'M', u'精'), + (0xFA1E, 'M', u'羽'), + (0xFA1F, 'V'), + (0xFA20, 'M', u'蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', u'諸'), + (0xFA23, 'V'), + (0xFA25, 'M', u'逸'), + (0xFA26, 'M', u'都'), + (0xFA27, 'V'), + (0xFA2A, 'M', u'飯'), + (0xFA2B, 'M', u'飼'), + (0xFA2C, 'M', u'館'), + (0xFA2D, 'M', u'鶴'), + (0xFA2E, 'M', u'郞'), + (0xFA2F, 'M', u'隷'), + (0xFA30, 'M', u'侮'), + (0xFA31, 'M', u'僧'), + (0xFA32, 'M', u'免'), + (0xFA33, 'M', u'勉'), + (0xFA34, 'M', u'勤'), + (0xFA35, 'M', u'卑'), + (0xFA36, 'M', u'喝'), + (0xFA37, 'M', u'嘆'), + (0xFA38, 'M', u'器'), + (0xFA39, 'M', u'塀'), + (0xFA3A, 'M', u'墨'), + (0xFA3B, 'M', u'層'), + (0xFA3C, 'M', u'屮'), + (0xFA3D, 'M', u'悔'), + (0xFA3E, 'M', u'慨'), + (0xFA3F, 'M', u'憎'), + (0xFA40, 'M', u'懲'), + (0xFA41, 'M', u'敏'), + (0xFA42, 'M', u'既'), + (0xFA43, 'M', u'暑'), + (0xFA44, 'M', u'梅'), + (0xFA45, 'M', u'海'), + (0xFA46, 'M', u'渚'), + ] + +def _seg_42(): + return [ + (0xFA47, 'M', u'漢'), + (0xFA48, 'M', u'煮'), + (0xFA49, 'M', u'爫'), + (0xFA4A, 'M', u'琢'), + (0xFA4B, 'M', u'碑'), + (0xFA4C, 'M', u'社'), + (0xFA4D, 'M', u'祉'), + (0xFA4E, 'M', u'祈'), + (0xFA4F, 'M', u'祐'), + (0xFA50, 'M', u'祖'), + (0xFA51, 'M', u'祝'), + (0xFA52, 'M', u'禍'), + (0xFA53, 'M', u'禎'), + (0xFA54, 'M', u'穀'), + (0xFA55, 'M', u'突'), + (0xFA56, 'M', u'節'), + (0xFA57, 'M', u'練'), + (0xFA58, 'M', u'縉'), + (0xFA59, 'M', u'繁'), + (0xFA5A, 'M', u'署'), + (0xFA5B, 'M', u'者'), + (0xFA5C, 'M', u'臭'), + (0xFA5D, 'M', u'艹'), + (0xFA5F, 'M', u'著'), + (0xFA60, 'M', u'褐'), + (0xFA61, 'M', u'視'), + (0xFA62, 'M', u'謁'), + (0xFA63, 'M', u'謹'), + (0xFA64, 'M', u'賓'), + (0xFA65, 'M', u'贈'), + (0xFA66, 'M', u'辶'), + (0xFA67, 'M', u'逸'), + (0xFA68, 'M', u'難'), + (0xFA69, 'M', u'響'), + (0xFA6A, 'M', u'頻'), + (0xFA6B, 'M', u'恵'), + (0xFA6C, 'M', u'𤋮'), + (0xFA6D, 'M', u'舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', u'並'), + (0xFA71, 'M', u'况'), + (0xFA72, 'M', u'全'), + (0xFA73, 'M', u'侀'), + (0xFA74, 'M', u'充'), + (0xFA75, 'M', u'冀'), + (0xFA76, 'M', u'勇'), + (0xFA77, 'M', u'勺'), + (0xFA78, 'M', u'喝'), + (0xFA79, 'M', u'啕'), + (0xFA7A, 'M', u'喙'), + (0xFA7B, 'M', u'嗢'), + (0xFA7C, 'M', u'塚'), + (0xFA7D, 'M', u'墳'), + (0xFA7E, 'M', u'奄'), + (0xFA7F, 'M', u'奔'), + (0xFA80, 'M', u'婢'), + (0xFA81, 'M', u'嬨'), + (0xFA82, 'M', u'廒'), + (0xFA83, 'M', u'廙'), + (0xFA84, 'M', u'彩'), + (0xFA85, 'M', u'徭'), + (0xFA86, 'M', u'惘'), + (0xFA87, 'M', u'慎'), + (0xFA88, 'M', u'愈'), + (0xFA89, 'M', u'憎'), + (0xFA8A, 'M', u'慠'), + (0xFA8B, 'M', u'懲'), + (0xFA8C, 'M', 
u'戴'), + (0xFA8D, 'M', u'揄'), + (0xFA8E, 'M', u'搜'), + (0xFA8F, 'M', u'摒'), + (0xFA90, 'M', u'敖'), + (0xFA91, 'M', u'晴'), + (0xFA92, 'M', u'朗'), + (0xFA93, 'M', u'望'), + (0xFA94, 'M', u'杖'), + (0xFA95, 'M', u'歹'), + (0xFA96, 'M', u'殺'), + (0xFA97, 'M', u'流'), + (0xFA98, 'M', u'滛'), + (0xFA99, 'M', u'滋'), + (0xFA9A, 'M', u'漢'), + (0xFA9B, 'M', u'瀞'), + (0xFA9C, 'M', u'煮'), + (0xFA9D, 'M', u'瞧'), + (0xFA9E, 'M', u'爵'), + (0xFA9F, 'M', u'犯'), + (0xFAA0, 'M', u'猪'), + (0xFAA1, 'M', u'瑱'), + (0xFAA2, 'M', u'甆'), + (0xFAA3, 'M', u'画'), + (0xFAA4, 'M', u'瘝'), + (0xFAA5, 'M', u'瘟'), + (0xFAA6, 'M', u'益'), + (0xFAA7, 'M', u'盛'), + (0xFAA8, 'M', u'直'), + (0xFAA9, 'M', u'睊'), + (0xFAAA, 'M', u'着'), + (0xFAAB, 'M', u'磌'), + (0xFAAC, 'M', u'窱'), + ] + +def _seg_43(): + return [ + (0xFAAD, 'M', u'節'), + (0xFAAE, 'M', u'类'), + (0xFAAF, 'M', u'絛'), + (0xFAB0, 'M', u'練'), + (0xFAB1, 'M', u'缾'), + (0xFAB2, 'M', u'者'), + (0xFAB3, 'M', u'荒'), + (0xFAB4, 'M', u'華'), + (0xFAB5, 'M', u'蝹'), + (0xFAB6, 'M', u'襁'), + (0xFAB7, 'M', u'覆'), + (0xFAB8, 'M', u'視'), + (0xFAB9, 'M', u'調'), + (0xFABA, 'M', u'諸'), + (0xFABB, 'M', u'請'), + (0xFABC, 'M', u'謁'), + (0xFABD, 'M', u'諾'), + (0xFABE, 'M', u'諭'), + (0xFABF, 'M', u'謹'), + (0xFAC0, 'M', u'變'), + (0xFAC1, 'M', u'贈'), + (0xFAC2, 'M', u'輸'), + (0xFAC3, 'M', u'遲'), + (0xFAC4, 'M', u'醙'), + (0xFAC5, 'M', u'鉶'), + (0xFAC6, 'M', u'陼'), + (0xFAC7, 'M', u'難'), + (0xFAC8, 'M', u'靖'), + (0xFAC9, 'M', u'韛'), + (0xFACA, 'M', u'響'), + (0xFACB, 'M', u'頋'), + (0xFACC, 'M', u'頻'), + (0xFACD, 'M', u'鬒'), + (0xFACE, 'M', u'龜'), + (0xFACF, 'M', u'𢡊'), + (0xFAD0, 'M', u'𢡄'), + (0xFAD1, 'M', u'𣏕'), + (0xFAD2, 'M', u'㮝'), + (0xFAD3, 'M', u'䀘'), + (0xFAD4, 'M', u'䀹'), + (0xFAD5, 'M', u'𥉉'), + (0xFAD6, 'M', u'𥳐'), + (0xFAD7, 'M', u'𧻓'), + (0xFAD8, 'M', u'齃'), + (0xFAD9, 'M', u'龎'), + (0xFADA, 'X'), + (0xFB00, 'M', u'ff'), + (0xFB01, 'M', u'fi'), + (0xFB02, 'M', u'fl'), + (0xFB03, 'M', u'ffi'), + (0xFB04, 'M', u'ffl'), + (0xFB05, 'M', u'st'), + (0xFB07, 'X'), + (0xFB13, 'M', u'մն'), + (0xFB14, 'M', u'մե'), + (0xFB15, 'M', u'մի'), + (0xFB16, 'M', u'վն'), + (0xFB17, 'M', u'մխ'), + (0xFB18, 'X'), + (0xFB1D, 'M', u'יִ'), + (0xFB1E, 'V'), + (0xFB1F, 'M', u'ײַ'), + (0xFB20, 'M', u'ע'), + (0xFB21, 'M', u'א'), + (0xFB22, 'M', u'ד'), + (0xFB23, 'M', u'ה'), + (0xFB24, 'M', u'כ'), + (0xFB25, 'M', u'ל'), + (0xFB26, 'M', u'ם'), + (0xFB27, 'M', u'ר'), + (0xFB28, 'M', u'ת'), + (0xFB29, '3', u'+'), + (0xFB2A, 'M', u'שׁ'), + (0xFB2B, 'M', u'שׂ'), + (0xFB2C, 'M', u'שּׁ'), + (0xFB2D, 'M', u'שּׂ'), + (0xFB2E, 'M', u'אַ'), + (0xFB2F, 'M', u'אָ'), + (0xFB30, 'M', u'אּ'), + (0xFB31, 'M', u'בּ'), + (0xFB32, 'M', u'גּ'), + (0xFB33, 'M', u'דּ'), + (0xFB34, 'M', u'הּ'), + (0xFB35, 'M', u'וּ'), + (0xFB36, 'M', u'זּ'), + (0xFB37, 'X'), + (0xFB38, 'M', u'טּ'), + (0xFB39, 'M', u'יּ'), + (0xFB3A, 'M', u'ךּ'), + (0xFB3B, 'M', u'כּ'), + (0xFB3C, 'M', u'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', u'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', u'נּ'), + (0xFB41, 'M', u'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', u'ףּ'), + (0xFB44, 'M', u'פּ'), + (0xFB45, 'X'), + ] + +def _seg_44(): + return [ + (0xFB46, 'M', u'צּ'), + (0xFB47, 'M', u'קּ'), + (0xFB48, 'M', u'רּ'), + (0xFB49, 'M', u'שּ'), + (0xFB4A, 'M', u'תּ'), + (0xFB4B, 'M', u'וֹ'), + (0xFB4C, 'M', u'בֿ'), + (0xFB4D, 'M', u'כֿ'), + (0xFB4E, 'M', u'פֿ'), + (0xFB4F, 'M', u'אל'), + (0xFB50, 'M', u'ٱ'), + (0xFB52, 'M', u'ٻ'), + (0xFB56, 'M', u'پ'), + (0xFB5A, 'M', u'ڀ'), + (0xFB5E, 'M', u'ٺ'), + (0xFB62, 'M', u'ٿ'), + (0xFB66, 'M', u'ٹ'), + (0xFB6A, 'M', u'ڤ'), + (0xFB6E, 'M', u'ڦ'), + (0xFB72, 
'M', u'ڄ'), + (0xFB76, 'M', u'ڃ'), + (0xFB7A, 'M', u'چ'), + (0xFB7E, 'M', u'ڇ'), + (0xFB82, 'M', u'ڍ'), + (0xFB84, 'M', u'ڌ'), + (0xFB86, 'M', u'ڎ'), + (0xFB88, 'M', u'ڈ'), + (0xFB8A, 'M', u'ژ'), + (0xFB8C, 'M', u'ڑ'), + (0xFB8E, 'M', u'ک'), + (0xFB92, 'M', u'گ'), + (0xFB96, 'M', u'ڳ'), + (0xFB9A, 'M', u'ڱ'), + (0xFB9E, 'M', u'ں'), + (0xFBA0, 'M', u'ڻ'), + (0xFBA4, 'M', u'ۀ'), + (0xFBA6, 'M', u'ہ'), + (0xFBAA, 'M', u'ھ'), + (0xFBAE, 'M', u'ے'), + (0xFBB0, 'M', u'ۓ'), + (0xFBB2, 'V'), + (0xFBC2, 'X'), + (0xFBD3, 'M', u'ڭ'), + (0xFBD7, 'M', u'ۇ'), + (0xFBD9, 'M', u'ۆ'), + (0xFBDB, 'M', u'ۈ'), + (0xFBDD, 'M', u'ۇٴ'), + (0xFBDE, 'M', u'ۋ'), + (0xFBE0, 'M', u'ۅ'), + (0xFBE2, 'M', u'ۉ'), + (0xFBE4, 'M', u'ې'), + (0xFBE8, 'M', u'ى'), + (0xFBEA, 'M', u'ئا'), + (0xFBEC, 'M', u'ئە'), + (0xFBEE, 'M', u'ئو'), + (0xFBF0, 'M', u'ئۇ'), + (0xFBF2, 'M', u'ئۆ'), + (0xFBF4, 'M', u'ئۈ'), + (0xFBF6, 'M', u'ئې'), + (0xFBF9, 'M', u'ئى'), + (0xFBFC, 'M', u'ی'), + (0xFC00, 'M', u'ئج'), + (0xFC01, 'M', u'ئح'), + (0xFC02, 'M', u'ئم'), + (0xFC03, 'M', u'ئى'), + (0xFC04, 'M', u'ئي'), + (0xFC05, 'M', u'بج'), + (0xFC06, 'M', u'بح'), + (0xFC07, 'M', u'بخ'), + (0xFC08, 'M', u'بم'), + (0xFC09, 'M', u'بى'), + (0xFC0A, 'M', u'بي'), + (0xFC0B, 'M', u'تج'), + (0xFC0C, 'M', u'تح'), + (0xFC0D, 'M', u'تخ'), + (0xFC0E, 'M', u'تم'), + (0xFC0F, 'M', u'تى'), + (0xFC10, 'M', u'تي'), + (0xFC11, 'M', u'ثج'), + (0xFC12, 'M', u'ثم'), + (0xFC13, 'M', u'ثى'), + (0xFC14, 'M', u'ثي'), + (0xFC15, 'M', u'جح'), + (0xFC16, 'M', u'جم'), + (0xFC17, 'M', u'حج'), + (0xFC18, 'M', u'حم'), + (0xFC19, 'M', u'خج'), + (0xFC1A, 'M', u'خح'), + (0xFC1B, 'M', u'خم'), + (0xFC1C, 'M', u'سج'), + (0xFC1D, 'M', u'سح'), + (0xFC1E, 'M', u'سخ'), + (0xFC1F, 'M', u'سم'), + (0xFC20, 'M', u'صح'), + (0xFC21, 'M', u'صم'), + (0xFC22, 'M', u'ضج'), + (0xFC23, 'M', u'ضح'), + (0xFC24, 'M', u'ضخ'), + (0xFC25, 'M', u'ضم'), + (0xFC26, 'M', u'طح'), + ] + +def _seg_45(): + return [ + (0xFC27, 'M', u'طم'), + (0xFC28, 'M', u'ظم'), + (0xFC29, 'M', u'عج'), + (0xFC2A, 'M', u'عم'), + (0xFC2B, 'M', u'غج'), + (0xFC2C, 'M', u'غم'), + (0xFC2D, 'M', u'فج'), + (0xFC2E, 'M', u'فح'), + (0xFC2F, 'M', u'فخ'), + (0xFC30, 'M', u'فم'), + (0xFC31, 'M', u'فى'), + (0xFC32, 'M', u'في'), + (0xFC33, 'M', u'قح'), + (0xFC34, 'M', u'قم'), + (0xFC35, 'M', u'قى'), + (0xFC36, 'M', u'قي'), + (0xFC37, 'M', u'كا'), + (0xFC38, 'M', u'كج'), + (0xFC39, 'M', u'كح'), + (0xFC3A, 'M', u'كخ'), + (0xFC3B, 'M', u'كل'), + (0xFC3C, 'M', u'كم'), + (0xFC3D, 'M', u'كى'), + (0xFC3E, 'M', u'كي'), + (0xFC3F, 'M', u'لج'), + (0xFC40, 'M', u'لح'), + (0xFC41, 'M', u'لخ'), + (0xFC42, 'M', u'لم'), + (0xFC43, 'M', u'لى'), + (0xFC44, 'M', u'لي'), + (0xFC45, 'M', u'مج'), + (0xFC46, 'M', u'مح'), + (0xFC47, 'M', u'مخ'), + (0xFC48, 'M', u'مم'), + (0xFC49, 'M', u'مى'), + (0xFC4A, 'M', u'مي'), + (0xFC4B, 'M', u'نج'), + (0xFC4C, 'M', u'نح'), + (0xFC4D, 'M', u'نخ'), + (0xFC4E, 'M', u'نم'), + (0xFC4F, 'M', u'نى'), + (0xFC50, 'M', u'ني'), + (0xFC51, 'M', u'هج'), + (0xFC52, 'M', u'هم'), + (0xFC53, 'M', u'هى'), + (0xFC54, 'M', u'هي'), + (0xFC55, 'M', u'يج'), + (0xFC56, 'M', u'يح'), + (0xFC57, 'M', u'يخ'), + (0xFC58, 'M', u'يم'), + (0xFC59, 'M', u'يى'), + (0xFC5A, 'M', u'يي'), + (0xFC5B, 'M', u'ذٰ'), + (0xFC5C, 'M', u'رٰ'), + (0xFC5D, 'M', u'ىٰ'), + (0xFC5E, '3', u' ٌّ'), + (0xFC5F, '3', u' ٍّ'), + (0xFC60, '3', u' َّ'), + (0xFC61, '3', u' ُّ'), + (0xFC62, '3', u' ِّ'), + (0xFC63, '3', u' ّٰ'), + (0xFC64, 'M', u'ئر'), + (0xFC65, 'M', u'ئز'), + (0xFC66, 'M', u'ئم'), + (0xFC67, 'M', u'ئن'), + (0xFC68, 'M', u'ئى'), + (0xFC69, 'M', u'ئي'), + (0xFC6A, 'M', 
u'بر'), + (0xFC6B, 'M', u'بز'), + (0xFC6C, 'M', u'بم'), + (0xFC6D, 'M', u'بن'), + (0xFC6E, 'M', u'بى'), + (0xFC6F, 'M', u'بي'), + (0xFC70, 'M', u'تر'), + (0xFC71, 'M', u'تز'), + (0xFC72, 'M', u'تم'), + (0xFC73, 'M', u'تن'), + (0xFC74, 'M', u'تى'), + (0xFC75, 'M', u'تي'), + (0xFC76, 'M', u'ثر'), + (0xFC77, 'M', u'ثز'), + (0xFC78, 'M', u'ثم'), + (0xFC79, 'M', u'ثن'), + (0xFC7A, 'M', u'ثى'), + (0xFC7B, 'M', u'ثي'), + (0xFC7C, 'M', u'فى'), + (0xFC7D, 'M', u'في'), + (0xFC7E, 'M', u'قى'), + (0xFC7F, 'M', u'قي'), + (0xFC80, 'M', u'كا'), + (0xFC81, 'M', u'كل'), + (0xFC82, 'M', u'كم'), + (0xFC83, 'M', u'كى'), + (0xFC84, 'M', u'كي'), + (0xFC85, 'M', u'لم'), + (0xFC86, 'M', u'لى'), + (0xFC87, 'M', u'لي'), + (0xFC88, 'M', u'ما'), + (0xFC89, 'M', u'مم'), + (0xFC8A, 'M', u'نر'), + ] + +def _seg_46(): + return [ + (0xFC8B, 'M', u'نز'), + (0xFC8C, 'M', u'نم'), + (0xFC8D, 'M', u'نن'), + (0xFC8E, 'M', u'نى'), + (0xFC8F, 'M', u'ني'), + (0xFC90, 'M', u'ىٰ'), + (0xFC91, 'M', u'ير'), + (0xFC92, 'M', u'يز'), + (0xFC93, 'M', u'يم'), + (0xFC94, 'M', u'ين'), + (0xFC95, 'M', u'يى'), + (0xFC96, 'M', u'يي'), + (0xFC97, 'M', u'ئج'), + (0xFC98, 'M', u'ئح'), + (0xFC99, 'M', u'ئخ'), + (0xFC9A, 'M', u'ئم'), + (0xFC9B, 'M', u'ئه'), + (0xFC9C, 'M', u'بج'), + (0xFC9D, 'M', u'بح'), + (0xFC9E, 'M', u'بخ'), + (0xFC9F, 'M', u'بم'), + (0xFCA0, 'M', u'به'), + (0xFCA1, 'M', u'تج'), + (0xFCA2, 'M', u'تح'), + (0xFCA3, 'M', u'تخ'), + (0xFCA4, 'M', u'تم'), + (0xFCA5, 'M', u'ته'), + (0xFCA6, 'M', u'ثم'), + (0xFCA7, 'M', u'جح'), + (0xFCA8, 'M', u'جم'), + (0xFCA9, 'M', u'حج'), + (0xFCAA, 'M', u'حم'), + (0xFCAB, 'M', u'خج'), + (0xFCAC, 'M', u'خم'), + (0xFCAD, 'M', u'سج'), + (0xFCAE, 'M', u'سح'), + (0xFCAF, 'M', u'سخ'), + (0xFCB0, 'M', u'سم'), + (0xFCB1, 'M', u'صح'), + (0xFCB2, 'M', u'صخ'), + (0xFCB3, 'M', u'صم'), + (0xFCB4, 'M', u'ضج'), + (0xFCB5, 'M', u'ضح'), + (0xFCB6, 'M', u'ضخ'), + (0xFCB7, 'M', u'ضم'), + (0xFCB8, 'M', u'طح'), + (0xFCB9, 'M', u'ظم'), + (0xFCBA, 'M', u'عج'), + (0xFCBB, 'M', u'عم'), + (0xFCBC, 'M', u'غج'), + (0xFCBD, 'M', u'غم'), + (0xFCBE, 'M', u'فج'), + (0xFCBF, 'M', u'فح'), + (0xFCC0, 'M', u'فخ'), + (0xFCC1, 'M', u'فم'), + (0xFCC2, 'M', u'قح'), + (0xFCC3, 'M', u'قم'), + (0xFCC4, 'M', u'كج'), + (0xFCC5, 'M', u'كح'), + (0xFCC6, 'M', u'كخ'), + (0xFCC7, 'M', u'كل'), + (0xFCC8, 'M', u'كم'), + (0xFCC9, 'M', u'لج'), + (0xFCCA, 'M', u'لح'), + (0xFCCB, 'M', u'لخ'), + (0xFCCC, 'M', u'لم'), + (0xFCCD, 'M', u'له'), + (0xFCCE, 'M', u'مج'), + (0xFCCF, 'M', u'مح'), + (0xFCD0, 'M', u'مخ'), + (0xFCD1, 'M', u'مم'), + (0xFCD2, 'M', u'نج'), + (0xFCD3, 'M', u'نح'), + (0xFCD4, 'M', u'نخ'), + (0xFCD5, 'M', u'نم'), + (0xFCD6, 'M', u'نه'), + (0xFCD7, 'M', u'هج'), + (0xFCD8, 'M', u'هم'), + (0xFCD9, 'M', u'هٰ'), + (0xFCDA, 'M', u'يج'), + (0xFCDB, 'M', u'يح'), + (0xFCDC, 'M', u'يخ'), + (0xFCDD, 'M', u'يم'), + (0xFCDE, 'M', u'يه'), + (0xFCDF, 'M', u'ئم'), + (0xFCE0, 'M', u'ئه'), + (0xFCE1, 'M', u'بم'), + (0xFCE2, 'M', u'به'), + (0xFCE3, 'M', u'تم'), + (0xFCE4, 'M', u'ته'), + (0xFCE5, 'M', u'ثم'), + (0xFCE6, 'M', u'ثه'), + (0xFCE7, 'M', u'سم'), + (0xFCE8, 'M', u'سه'), + (0xFCE9, 'M', u'شم'), + (0xFCEA, 'M', u'شه'), + (0xFCEB, 'M', u'كل'), + (0xFCEC, 'M', u'كم'), + (0xFCED, 'M', u'لم'), + (0xFCEE, 'M', u'نم'), + ] + +def _seg_47(): + return [ + (0xFCEF, 'M', u'نه'), + (0xFCF0, 'M', u'يم'), + (0xFCF1, 'M', u'يه'), + (0xFCF2, 'M', u'ـَّ'), + (0xFCF3, 'M', u'ـُّ'), + (0xFCF4, 'M', u'ـِّ'), + (0xFCF5, 'M', u'طى'), + (0xFCF6, 'M', u'طي'), + (0xFCF7, 'M', u'عى'), + (0xFCF8, 'M', u'عي'), + (0xFCF9, 'M', u'غى'), + (0xFCFA, 'M', u'غي'), + (0xFCFB, 'M', 
u'سى'), + (0xFCFC, 'M', u'سي'), + (0xFCFD, 'M', u'شى'), + (0xFCFE, 'M', u'شي'), + (0xFCFF, 'M', u'حى'), + (0xFD00, 'M', u'حي'), + (0xFD01, 'M', u'جى'), + (0xFD02, 'M', u'جي'), + (0xFD03, 'M', u'خى'), + (0xFD04, 'M', u'خي'), + (0xFD05, 'M', u'صى'), + (0xFD06, 'M', u'صي'), + (0xFD07, 'M', u'ضى'), + (0xFD08, 'M', u'ضي'), + (0xFD09, 'M', u'شج'), + (0xFD0A, 'M', u'شح'), + (0xFD0B, 'M', u'شخ'), + (0xFD0C, 'M', u'شم'), + (0xFD0D, 'M', u'شر'), + (0xFD0E, 'M', u'سر'), + (0xFD0F, 'M', u'صر'), + (0xFD10, 'M', u'ضر'), + (0xFD11, 'M', u'طى'), + (0xFD12, 'M', u'طي'), + (0xFD13, 'M', u'عى'), + (0xFD14, 'M', u'عي'), + (0xFD15, 'M', u'غى'), + (0xFD16, 'M', u'غي'), + (0xFD17, 'M', u'سى'), + (0xFD18, 'M', u'سي'), + (0xFD19, 'M', u'شى'), + (0xFD1A, 'M', u'شي'), + (0xFD1B, 'M', u'حى'), + (0xFD1C, 'M', u'حي'), + (0xFD1D, 'M', u'جى'), + (0xFD1E, 'M', u'جي'), + (0xFD1F, 'M', u'خى'), + (0xFD20, 'M', u'خي'), + (0xFD21, 'M', u'صى'), + (0xFD22, 'M', u'صي'), + (0xFD23, 'M', u'ضى'), + (0xFD24, 'M', u'ضي'), + (0xFD25, 'M', u'شج'), + (0xFD26, 'M', u'شح'), + (0xFD27, 'M', u'شخ'), + (0xFD28, 'M', u'شم'), + (0xFD29, 'M', u'شر'), + (0xFD2A, 'M', u'سر'), + (0xFD2B, 'M', u'صر'), + (0xFD2C, 'M', u'ضر'), + (0xFD2D, 'M', u'شج'), + (0xFD2E, 'M', u'شح'), + (0xFD2F, 'M', u'شخ'), + (0xFD30, 'M', u'شم'), + (0xFD31, 'M', u'سه'), + (0xFD32, 'M', u'شه'), + (0xFD33, 'M', u'طم'), + (0xFD34, 'M', u'سج'), + (0xFD35, 'M', u'سح'), + (0xFD36, 'M', u'سخ'), + (0xFD37, 'M', u'شج'), + (0xFD38, 'M', u'شح'), + (0xFD39, 'M', u'شخ'), + (0xFD3A, 'M', u'طم'), + (0xFD3B, 'M', u'ظم'), + (0xFD3C, 'M', u'اً'), + (0xFD3E, 'V'), + (0xFD40, 'X'), + (0xFD50, 'M', u'تجم'), + (0xFD51, 'M', u'تحج'), + (0xFD53, 'M', u'تحم'), + (0xFD54, 'M', u'تخم'), + (0xFD55, 'M', u'تمج'), + (0xFD56, 'M', u'تمح'), + (0xFD57, 'M', u'تمخ'), + (0xFD58, 'M', u'جمح'), + (0xFD5A, 'M', u'حمي'), + (0xFD5B, 'M', u'حمى'), + (0xFD5C, 'M', u'سحج'), + (0xFD5D, 'M', u'سجح'), + (0xFD5E, 'M', u'سجى'), + (0xFD5F, 'M', u'سمح'), + (0xFD61, 'M', u'سمج'), + (0xFD62, 'M', u'سمم'), + (0xFD64, 'M', u'صحح'), + (0xFD66, 'M', u'صمم'), + (0xFD67, 'M', u'شحم'), + (0xFD69, 'M', u'شجي'), + ] + +def _seg_48(): + return [ + (0xFD6A, 'M', u'شمخ'), + (0xFD6C, 'M', u'شمم'), + (0xFD6E, 'M', u'ضحى'), + (0xFD6F, 'M', u'ضخم'), + (0xFD71, 'M', u'طمح'), + (0xFD73, 'M', u'طمم'), + (0xFD74, 'M', u'طمي'), + (0xFD75, 'M', u'عجم'), + (0xFD76, 'M', u'عمم'), + (0xFD78, 'M', u'عمى'), + (0xFD79, 'M', u'غمم'), + (0xFD7A, 'M', u'غمي'), + (0xFD7B, 'M', u'غمى'), + (0xFD7C, 'M', u'فخم'), + (0xFD7E, 'M', u'قمح'), + (0xFD7F, 'M', u'قمم'), + (0xFD80, 'M', u'لحم'), + (0xFD81, 'M', u'لحي'), + (0xFD82, 'M', u'لحى'), + (0xFD83, 'M', u'لجج'), + (0xFD85, 'M', u'لخم'), + (0xFD87, 'M', u'لمح'), + (0xFD89, 'M', u'محج'), + (0xFD8A, 'M', u'محم'), + (0xFD8B, 'M', u'محي'), + (0xFD8C, 'M', u'مجح'), + (0xFD8D, 'M', u'مجم'), + (0xFD8E, 'M', u'مخج'), + (0xFD8F, 'M', u'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', u'مجخ'), + (0xFD93, 'M', u'همج'), + (0xFD94, 'M', u'همم'), + (0xFD95, 'M', u'نحم'), + (0xFD96, 'M', u'نحى'), + (0xFD97, 'M', u'نجم'), + (0xFD99, 'M', u'نجى'), + (0xFD9A, 'M', u'نمي'), + (0xFD9B, 'M', u'نمى'), + (0xFD9C, 'M', u'يمم'), + (0xFD9E, 'M', u'بخي'), + (0xFD9F, 'M', u'تجي'), + (0xFDA0, 'M', u'تجى'), + (0xFDA1, 'M', u'تخي'), + (0xFDA2, 'M', u'تخى'), + (0xFDA3, 'M', u'تمي'), + (0xFDA4, 'M', u'تمى'), + (0xFDA5, 'M', u'جمي'), + (0xFDA6, 'M', u'جحى'), + (0xFDA7, 'M', u'جمى'), + (0xFDA8, 'M', u'سخى'), + (0xFDA9, 'M', u'صحي'), + (0xFDAA, 'M', u'شحي'), + (0xFDAB, 'M', u'ضحي'), + (0xFDAC, 'M', u'لجي'), + (0xFDAD, 'M', u'لمي'), + (0xFDAE, 'M', u'يحي'), + 
(0xFDAF, 'M', u'يجي'), + (0xFDB0, 'M', u'يمي'), + (0xFDB1, 'M', u'ممي'), + (0xFDB2, 'M', u'قمي'), + (0xFDB3, 'M', u'نحي'), + (0xFDB4, 'M', u'قمح'), + (0xFDB5, 'M', u'لحم'), + (0xFDB6, 'M', u'عمي'), + (0xFDB7, 'M', u'كمي'), + (0xFDB8, 'M', u'نجح'), + (0xFDB9, 'M', u'مخي'), + (0xFDBA, 'M', u'لجم'), + (0xFDBB, 'M', u'كمم'), + (0xFDBC, 'M', u'لجم'), + (0xFDBD, 'M', u'نجح'), + (0xFDBE, 'M', u'جحي'), + (0xFDBF, 'M', u'حجي'), + (0xFDC0, 'M', u'مجي'), + (0xFDC1, 'M', u'فمي'), + (0xFDC2, 'M', u'بحي'), + (0xFDC3, 'M', u'كمم'), + (0xFDC4, 'M', u'عجم'), + (0xFDC5, 'M', u'صمم'), + (0xFDC6, 'M', u'سخي'), + (0xFDC7, 'M', u'نجي'), + (0xFDC8, 'X'), + (0xFDF0, 'M', u'صلے'), + (0xFDF1, 'M', u'قلے'), + (0xFDF2, 'M', u'الله'), + (0xFDF3, 'M', u'اكبر'), + (0xFDF4, 'M', u'محمد'), + (0xFDF5, 'M', u'صلعم'), + (0xFDF6, 'M', u'رسول'), + (0xFDF7, 'M', u'عليه'), + (0xFDF8, 'M', u'وسلم'), + (0xFDF9, 'M', u'صلى'), + (0xFDFA, '3', u'صلى الله عليه وسلم'), + (0xFDFB, '3', u'جل جلاله'), + (0xFDFC, 'M', u'ریال'), + (0xFDFD, 'V'), + (0xFDFE, 'X'), + (0xFE00, 'I'), + (0xFE10, '3', u','), + ] + +def _seg_49(): + return [ + (0xFE11, 'M', u'、'), + (0xFE12, 'X'), + (0xFE13, '3', u':'), + (0xFE14, '3', u';'), + (0xFE15, '3', u'!'), + (0xFE16, '3', u'?'), + (0xFE17, 'M', u'〖'), + (0xFE18, 'M', u'〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE30, 'X'), + (0xFE31, 'M', u'—'), + (0xFE32, 'M', u'–'), + (0xFE33, '3', u'_'), + (0xFE35, '3', u'('), + (0xFE36, '3', u')'), + (0xFE37, '3', u'{'), + (0xFE38, '3', u'}'), + (0xFE39, 'M', u'〔'), + (0xFE3A, 'M', u'〕'), + (0xFE3B, 'M', u'【'), + (0xFE3C, 'M', u'】'), + (0xFE3D, 'M', u'《'), + (0xFE3E, 'M', u'》'), + (0xFE3F, 'M', u'〈'), + (0xFE40, 'M', u'〉'), + (0xFE41, 'M', u'「'), + (0xFE42, 'M', u'」'), + (0xFE43, 'M', u'『'), + (0xFE44, 'M', u'』'), + (0xFE45, 'V'), + (0xFE47, '3', u'['), + (0xFE48, '3', u']'), + (0xFE49, '3', u' ̅'), + (0xFE4D, '3', u'_'), + (0xFE50, '3', u','), + (0xFE51, 'M', u'、'), + (0xFE52, 'X'), + (0xFE54, '3', u';'), + (0xFE55, '3', u':'), + (0xFE56, '3', u'?'), + (0xFE57, '3', u'!'), + (0xFE58, 'M', u'—'), + (0xFE59, '3', u'('), + (0xFE5A, '3', u')'), + (0xFE5B, '3', u'{'), + (0xFE5C, '3', u'}'), + (0xFE5D, 'M', u'〔'), + (0xFE5E, 'M', u'〕'), + (0xFE5F, '3', u'#'), + (0xFE60, '3', u'&'), + (0xFE61, '3', u'*'), + (0xFE62, '3', u'+'), + (0xFE63, 'M', u'-'), + (0xFE64, '3', u'<'), + (0xFE65, '3', u'>'), + (0xFE66, '3', u'='), + (0xFE67, 'X'), + (0xFE68, '3', u'\\'), + (0xFE69, '3', u'$'), + (0xFE6A, '3', u'%'), + (0xFE6B, '3', u'@'), + (0xFE6C, 'X'), + (0xFE70, '3', u' ً'), + (0xFE71, 'M', u'ـً'), + (0xFE72, '3', u' ٌ'), + (0xFE73, 'V'), + (0xFE74, '3', u' ٍ'), + (0xFE75, 'X'), + (0xFE76, '3', u' َ'), + (0xFE77, 'M', u'ـَ'), + (0xFE78, '3', u' ُ'), + (0xFE79, 'M', u'ـُ'), + (0xFE7A, '3', u' ِ'), + (0xFE7B, 'M', u'ـِ'), + (0xFE7C, '3', u' ّ'), + (0xFE7D, 'M', u'ـّ'), + (0xFE7E, '3', u' ْ'), + (0xFE7F, 'M', u'ـْ'), + (0xFE80, 'M', u'ء'), + (0xFE81, 'M', u'آ'), + (0xFE83, 'M', u'أ'), + (0xFE85, 'M', u'ؤ'), + (0xFE87, 'M', u'إ'), + (0xFE89, 'M', u'ئ'), + (0xFE8D, 'M', u'ا'), + (0xFE8F, 'M', u'ب'), + (0xFE93, 'M', u'ة'), + (0xFE95, 'M', u'ت'), + (0xFE99, 'M', u'ث'), + (0xFE9D, 'M', u'ج'), + (0xFEA1, 'M', u'ح'), + (0xFEA5, 'M', u'خ'), + (0xFEA9, 'M', u'د'), + (0xFEAB, 'M', u'ذ'), + (0xFEAD, 'M', u'ر'), + (0xFEAF, 'M', u'ز'), + (0xFEB1, 'M', u'س'), + (0xFEB5, 'M', u'ش'), + (0xFEB9, 'M', u'ص'), + ] + +def _seg_50(): + return [ + (0xFEBD, 'M', u'ض'), + (0xFEC1, 'M', u'ط'), + (0xFEC5, 'M', u'ظ'), + (0xFEC9, 'M', u'ع'), + (0xFECD, 'M', u'غ'), + (0xFED1, 'M', u'ف'), + (0xFED5, 'M', u'ق'), + 
(0xFED9, 'M', u'ك'), + (0xFEDD, 'M', u'ل'), + (0xFEE1, 'M', u'م'), + (0xFEE5, 'M', u'ن'), + (0xFEE9, 'M', u'ه'), + (0xFEED, 'M', u'و'), + (0xFEEF, 'M', u'ى'), + (0xFEF1, 'M', u'ي'), + (0xFEF5, 'M', u'لآ'), + (0xFEF7, 'M', u'لأ'), + (0xFEF9, 'M', u'لإ'), + (0xFEFB, 'M', u'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', u'!'), + (0xFF02, '3', u'"'), + (0xFF03, '3', u'#'), + (0xFF04, '3', u'$'), + (0xFF05, '3', u'%'), + (0xFF06, '3', u'&'), + (0xFF07, '3', u'\''), + (0xFF08, '3', u'('), + (0xFF09, '3', u')'), + (0xFF0A, '3', u'*'), + (0xFF0B, '3', u'+'), + (0xFF0C, '3', u','), + (0xFF0D, 'M', u'-'), + (0xFF0E, 'M', u'.'), + (0xFF0F, '3', u'/'), + (0xFF10, 'M', u'0'), + (0xFF11, 'M', u'1'), + (0xFF12, 'M', u'2'), + (0xFF13, 'M', u'3'), + (0xFF14, 'M', u'4'), + (0xFF15, 'M', u'5'), + (0xFF16, 'M', u'6'), + (0xFF17, 'M', u'7'), + (0xFF18, 'M', u'8'), + (0xFF19, 'M', u'9'), + (0xFF1A, '3', u':'), + (0xFF1B, '3', u';'), + (0xFF1C, '3', u'<'), + (0xFF1D, '3', u'='), + (0xFF1E, '3', u'>'), + (0xFF1F, '3', u'?'), + (0xFF20, '3', u'@'), + (0xFF21, 'M', u'a'), + (0xFF22, 'M', u'b'), + (0xFF23, 'M', u'c'), + (0xFF24, 'M', u'd'), + (0xFF25, 'M', u'e'), + (0xFF26, 'M', u'f'), + (0xFF27, 'M', u'g'), + (0xFF28, 'M', u'h'), + (0xFF29, 'M', u'i'), + (0xFF2A, 'M', u'j'), + (0xFF2B, 'M', u'k'), + (0xFF2C, 'M', u'l'), + (0xFF2D, 'M', u'm'), + (0xFF2E, 'M', u'n'), + (0xFF2F, 'M', u'o'), + (0xFF30, 'M', u'p'), + (0xFF31, 'M', u'q'), + (0xFF32, 'M', u'r'), + (0xFF33, 'M', u's'), + (0xFF34, 'M', u't'), + (0xFF35, 'M', u'u'), + (0xFF36, 'M', u'v'), + (0xFF37, 'M', u'w'), + (0xFF38, 'M', u'x'), + (0xFF39, 'M', u'y'), + (0xFF3A, 'M', u'z'), + (0xFF3B, '3', u'['), + (0xFF3C, '3', u'\\'), + (0xFF3D, '3', u']'), + (0xFF3E, '3', u'^'), + (0xFF3F, '3', u'_'), + (0xFF40, '3', u'`'), + (0xFF41, 'M', u'a'), + (0xFF42, 'M', u'b'), + (0xFF43, 'M', u'c'), + (0xFF44, 'M', u'd'), + (0xFF45, 'M', u'e'), + (0xFF46, 'M', u'f'), + (0xFF47, 'M', u'g'), + (0xFF48, 'M', u'h'), + (0xFF49, 'M', u'i'), + (0xFF4A, 'M', u'j'), + (0xFF4B, 'M', u'k'), + (0xFF4C, 'M', u'l'), + (0xFF4D, 'M', u'm'), + (0xFF4E, 'M', u'n'), + ] + +def _seg_51(): + return [ + (0xFF4F, 'M', u'o'), + (0xFF50, 'M', u'p'), + (0xFF51, 'M', u'q'), + (0xFF52, 'M', u'r'), + (0xFF53, 'M', u's'), + (0xFF54, 'M', u't'), + (0xFF55, 'M', u'u'), + (0xFF56, 'M', u'v'), + (0xFF57, 'M', u'w'), + (0xFF58, 'M', u'x'), + (0xFF59, 'M', u'y'), + (0xFF5A, 'M', u'z'), + (0xFF5B, '3', u'{'), + (0xFF5C, '3', u'|'), + (0xFF5D, '3', u'}'), + (0xFF5E, '3', u'~'), + (0xFF5F, 'M', u'⦅'), + (0xFF60, 'M', u'⦆'), + (0xFF61, 'M', u'.'), + (0xFF62, 'M', u'「'), + (0xFF63, 'M', u'」'), + (0xFF64, 'M', u'、'), + (0xFF65, 'M', u'・'), + (0xFF66, 'M', u'ヲ'), + (0xFF67, 'M', u'ァ'), + (0xFF68, 'M', u'ィ'), + (0xFF69, 'M', u'ゥ'), + (0xFF6A, 'M', u'ェ'), + (0xFF6B, 'M', u'ォ'), + (0xFF6C, 'M', u'ャ'), + (0xFF6D, 'M', u'ュ'), + (0xFF6E, 'M', u'ョ'), + (0xFF6F, 'M', u'ッ'), + (0xFF70, 'M', u'ー'), + (0xFF71, 'M', u'ア'), + (0xFF72, 'M', u'イ'), + (0xFF73, 'M', u'ウ'), + (0xFF74, 'M', u'エ'), + (0xFF75, 'M', u'オ'), + (0xFF76, 'M', u'カ'), + (0xFF77, 'M', u'キ'), + (0xFF78, 'M', u'ク'), + (0xFF79, 'M', u'ケ'), + (0xFF7A, 'M', u'コ'), + (0xFF7B, 'M', u'サ'), + (0xFF7C, 'M', u'シ'), + (0xFF7D, 'M', u'ス'), + (0xFF7E, 'M', u'セ'), + (0xFF7F, 'M', u'ソ'), + (0xFF80, 'M', u'タ'), + (0xFF81, 'M', u'チ'), + (0xFF82, 'M', u'ツ'), + (0xFF83, 'M', u'テ'), + (0xFF84, 'M', u'ト'), + (0xFF85, 'M', u'ナ'), + (0xFF86, 'M', u'ニ'), + (0xFF87, 'M', u'ヌ'), + (0xFF88, 'M', u'ネ'), + (0xFF89, 'M', u'ノ'), + (0xFF8A, 'M', u'ハ'), + (0xFF8B, 'M', 
u'ヒ'), + (0xFF8C, 'M', u'フ'), + (0xFF8D, 'M', u'ヘ'), + (0xFF8E, 'M', u'ホ'), + (0xFF8F, 'M', u'マ'), + (0xFF90, 'M', u'ミ'), + (0xFF91, 'M', u'ム'), + (0xFF92, 'M', u'メ'), + (0xFF93, 'M', u'モ'), + (0xFF94, 'M', u'ヤ'), + (0xFF95, 'M', u'ユ'), + (0xFF96, 'M', u'ヨ'), + (0xFF97, 'M', u'ラ'), + (0xFF98, 'M', u'リ'), + (0xFF99, 'M', u'ル'), + (0xFF9A, 'M', u'レ'), + (0xFF9B, 'M', u'ロ'), + (0xFF9C, 'M', u'ワ'), + (0xFF9D, 'M', u'ン'), + (0xFF9E, 'M', u'゙'), + (0xFF9F, 'M', u'゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', u'ᄀ'), + (0xFFA2, 'M', u'ᄁ'), + (0xFFA3, 'M', u'ᆪ'), + (0xFFA4, 'M', u'ᄂ'), + (0xFFA5, 'M', u'ᆬ'), + (0xFFA6, 'M', u'ᆭ'), + (0xFFA7, 'M', u'ᄃ'), + (0xFFA8, 'M', u'ᄄ'), + (0xFFA9, 'M', u'ᄅ'), + (0xFFAA, 'M', u'ᆰ'), + (0xFFAB, 'M', u'ᆱ'), + (0xFFAC, 'M', u'ᆲ'), + (0xFFAD, 'M', u'ᆳ'), + (0xFFAE, 'M', u'ᆴ'), + (0xFFAF, 'M', u'ᆵ'), + (0xFFB0, 'M', u'ᄚ'), + (0xFFB1, 'M', u'ᄆ'), + (0xFFB2, 'M', u'ᄇ'), + ] + +def _seg_52(): + return [ + (0xFFB3, 'M', u'ᄈ'), + (0xFFB4, 'M', u'ᄡ'), + (0xFFB5, 'M', u'ᄉ'), + (0xFFB6, 'M', u'ᄊ'), + (0xFFB7, 'M', u'ᄋ'), + (0xFFB8, 'M', u'ᄌ'), + (0xFFB9, 'M', u'ᄍ'), + (0xFFBA, 'M', u'ᄎ'), + (0xFFBB, 'M', u'ᄏ'), + (0xFFBC, 'M', u'ᄐ'), + (0xFFBD, 'M', u'ᄑ'), + (0xFFBE, 'M', u'ᄒ'), + (0xFFBF, 'X'), + (0xFFC2, 'M', u'ᅡ'), + (0xFFC3, 'M', u'ᅢ'), + (0xFFC4, 'M', u'ᅣ'), + (0xFFC5, 'M', u'ᅤ'), + (0xFFC6, 'M', u'ᅥ'), + (0xFFC7, 'M', u'ᅦ'), + (0xFFC8, 'X'), + (0xFFCA, 'M', u'ᅧ'), + (0xFFCB, 'M', u'ᅨ'), + (0xFFCC, 'M', u'ᅩ'), + (0xFFCD, 'M', u'ᅪ'), + (0xFFCE, 'M', u'ᅫ'), + (0xFFCF, 'M', u'ᅬ'), + (0xFFD0, 'X'), + (0xFFD2, 'M', u'ᅭ'), + (0xFFD3, 'M', u'ᅮ'), + (0xFFD4, 'M', u'ᅯ'), + (0xFFD5, 'M', u'ᅰ'), + (0xFFD6, 'M', u'ᅱ'), + (0xFFD7, 'M', u'ᅲ'), + (0xFFD8, 'X'), + (0xFFDA, 'M', u'ᅳ'), + (0xFFDB, 'M', u'ᅴ'), + (0xFFDC, 'M', u'ᅵ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', u'¢'), + (0xFFE1, 'M', u'£'), + (0xFFE2, 'M', u'¬'), + (0xFFE3, '3', u' ̄'), + (0xFFE4, 'M', u'¦'), + (0xFFE5, 'M', u'¥'), + (0xFFE6, 'M', u'₩'), + (0xFFE7, 'X'), + (0xFFE8, 'M', u'│'), + (0xFFE9, 'M', u'←'), + (0xFFEA, 'M', u'↑'), + (0xFFEB, 'M', u'→'), + (0xFFEC, 'M', u'↓'), + (0xFFED, 'M', u'■'), + (0xFFEE, 'M', u'○'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018F, 'X'), + (0x10190, 'V'), + (0x1019C, 'X'), + (0x101A0, 'V'), + (0x101A1, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x102E0, 'V'), + (0x102FC, 'X'), + (0x10300, 'V'), + (0x10324, 'X'), + (0x1032D, 'V'), + (0x1034B, 'X'), + (0x10350, 'V'), + (0x1037B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', u'𐐨'), + (0x10401, 'M', u'𐐩'), + ] + +def _seg_53(): + return [ + (0x10402, 'M', u'𐐪'), + (0x10403, 'M', u'𐐫'), + (0x10404, 'M', u'𐐬'), + (0x10405, 'M', u'𐐭'), + (0x10406, 'M', u'𐐮'), + (0x10407, 'M', u'𐐯'), + (0x10408, 'M', u'𐐰'), + (0x10409, 'M', u'𐐱'), + (0x1040A, 'M', u'𐐲'), + (0x1040B, 'M', u'𐐳'), + (0x1040C, 'M', u'𐐴'), + (0x1040D, 'M', u'𐐵'), + (0x1040E, 'M', u'𐐶'), + (0x1040F, 'M', u'𐐷'), + (0x10410, 'M', u'𐐸'), + (0x10411, 'M', u'𐐹'), + (0x10412, 'M', u'𐐺'), + (0x10413, 'M', u'𐐻'), + (0x10414, 'M', u'𐐼'), + (0x10415, 'M', u'𐐽'), + (0x10416, 'M', u'𐐾'), + (0x10417, 'M', u'𐐿'), + (0x10418, 'M', 
u'𐑀'), + (0x10419, 'M', u'𐑁'), + (0x1041A, 'M', u'𐑂'), + (0x1041B, 'M', u'𐑃'), + (0x1041C, 'M', u'𐑄'), + (0x1041D, 'M', u'𐑅'), + (0x1041E, 'M', u'𐑆'), + (0x1041F, 'M', u'𐑇'), + (0x10420, 'M', u'𐑈'), + (0x10421, 'M', u'𐑉'), + (0x10422, 'M', u'𐑊'), + (0x10423, 'M', u'𐑋'), + (0x10424, 'M', u'𐑌'), + (0x10425, 'M', u'𐑍'), + (0x10426, 'M', u'𐑎'), + (0x10427, 'M', u'𐑏'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x104B0, 'M', u'𐓘'), + (0x104B1, 'M', u'𐓙'), + (0x104B2, 'M', u'𐓚'), + (0x104B3, 'M', u'𐓛'), + (0x104B4, 'M', u'𐓜'), + (0x104B5, 'M', u'𐓝'), + (0x104B6, 'M', u'𐓞'), + (0x104B7, 'M', u'𐓟'), + (0x104B8, 'M', u'𐓠'), + (0x104B9, 'M', u'𐓡'), + (0x104BA, 'M', u'𐓢'), + (0x104BB, 'M', u'𐓣'), + (0x104BC, 'M', u'𐓤'), + (0x104BD, 'M', u'𐓥'), + (0x104BE, 'M', u'𐓦'), + (0x104BF, 'M', u'𐓧'), + (0x104C0, 'M', u'𐓨'), + (0x104C1, 'M', u'𐓩'), + (0x104C2, 'M', u'𐓪'), + (0x104C3, 'M', u'𐓫'), + (0x104C4, 'M', u'𐓬'), + (0x104C5, 'M', u'𐓭'), + (0x104C6, 'M', u'𐓮'), + (0x104C7, 'M', u'𐓯'), + (0x104C8, 'M', u'𐓰'), + (0x104C9, 'M', u'𐓱'), + (0x104CA, 'M', u'𐓲'), + (0x104CB, 'M', u'𐓳'), + (0x104CC, 'M', u'𐓴'), + (0x104CD, 'M', u'𐓵'), + (0x104CE, 'M', u'𐓶'), + (0x104CF, 'M', u'𐓷'), + (0x104D0, 'M', u'𐓸'), + (0x104D1, 'M', u'𐓹'), + (0x104D2, 'M', u'𐓺'), + (0x104D3, 'M', u'𐓻'), + (0x104D4, 'X'), + (0x104D8, 'V'), + (0x104FC, 'X'), + (0x10500, 'V'), + (0x10528, 'X'), + (0x10530, 'V'), + (0x10564, 'X'), + (0x1056F, 'V'), + (0x10570, 'X'), + (0x10600, 'V'), + (0x10737, 'X'), + (0x10740, 'V'), + (0x10756, 'X'), + (0x10760, 'V'), + (0x10768, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + ] + +def _seg_54(): + return [ + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x1089F, 'X'), + (0x108A7, 'V'), + (0x108B0, 'X'), + (0x108E0, 'V'), + (0x108F3, 'X'), + (0x108F4, 'V'), + (0x108F6, 'X'), + (0x108FB, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BC, 'V'), + (0x109D0, 'X'), + (0x109D2, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A36, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A49, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10AA0, 'X'), + (0x10AC0, 'V'), + (0x10AE7, 'X'), + (0x10AEB, 'V'), + (0x10AF7, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B92, 'X'), + (0x10B99, 'V'), + (0x10B9D, 'X'), + (0x10BA9, 'V'), + (0x10BB0, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10C80, 'M', u'𐳀'), + (0x10C81, 'M', u'𐳁'), + (0x10C82, 'M', u'𐳂'), + (0x10C83, 'M', u'𐳃'), + (0x10C84, 'M', u'𐳄'), + (0x10C85, 'M', u'𐳅'), + (0x10C86, 'M', u'𐳆'), + (0x10C87, 'M', u'𐳇'), + (0x10C88, 'M', u'𐳈'), + (0x10C89, 'M', u'𐳉'), + (0x10C8A, 'M', u'𐳊'), + (0x10C8B, 'M', u'𐳋'), + (0x10C8C, 'M', u'𐳌'), + (0x10C8D, 'M', u'𐳍'), + (0x10C8E, 'M', u'𐳎'), + (0x10C8F, 'M', u'𐳏'), + (0x10C90, 'M', u'𐳐'), + (0x10C91, 'M', u'𐳑'), + (0x10C92, 'M', u'𐳒'), + (0x10C93, 'M', u'𐳓'), + (0x10C94, 'M', u'𐳔'), + (0x10C95, 'M', u'𐳕'), + (0x10C96, 'M', u'𐳖'), + (0x10C97, 'M', u'𐳗'), + (0x10C98, 'M', u'𐳘'), + (0x10C99, 'M', u'𐳙'), + (0x10C9A, 'M', u'𐳚'), + (0x10C9B, 'M', u'𐳛'), + (0x10C9C, 'M', u'𐳜'), + (0x10C9D, 'M', u'𐳝'), + (0x10C9E, 'M', u'𐳞'), + (0x10C9F, 'M', 
u'𐳟'), + (0x10CA0, 'M', u'𐳠'), + (0x10CA1, 'M', u'𐳡'), + (0x10CA2, 'M', u'𐳢'), + (0x10CA3, 'M', u'𐳣'), + (0x10CA4, 'M', u'𐳤'), + (0x10CA5, 'M', u'𐳥'), + (0x10CA6, 'M', u'𐳦'), + (0x10CA7, 'M', u'𐳧'), + (0x10CA8, 'M', u'𐳨'), + ] + +def _seg_55(): + return [ + (0x10CA9, 'M', u'𐳩'), + (0x10CAA, 'M', u'𐳪'), + (0x10CAB, 'M', u'𐳫'), + (0x10CAC, 'M', u'𐳬'), + (0x10CAD, 'M', u'𐳭'), + (0x10CAE, 'M', u'𐳮'), + (0x10CAF, 'M', u'𐳯'), + (0x10CB0, 'M', u'𐳰'), + (0x10CB1, 'M', u'𐳱'), + (0x10CB2, 'M', u'𐳲'), + (0x10CB3, 'X'), + (0x10CC0, 'V'), + (0x10CF3, 'X'), + (0x10CFA, 'V'), + (0x10D28, 'X'), + (0x10D30, 'V'), + (0x10D3A, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x10F00, 'V'), + (0x10F28, 'X'), + (0x10F30, 'V'), + (0x10F5A, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11070, 'X'), + (0x1107F, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + (0x110C2, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11147, 'X'), + (0x11150, 'V'), + (0x11177, 'X'), + (0x11180, 'V'), + (0x111CE, 'X'), + (0x111D0, 'V'), + (0x111E0, 'X'), + (0x111E1, 'V'), + (0x111F5, 'X'), + (0x11200, 'V'), + (0x11212, 'X'), + (0x11213, 'V'), + (0x1123F, 'X'), + (0x11280, 'V'), + (0x11287, 'X'), + (0x11288, 'V'), + (0x11289, 'X'), + (0x1128A, 'V'), + (0x1128E, 'X'), + (0x1128F, 'V'), + (0x1129E, 'X'), + (0x1129F, 'V'), + (0x112AA, 'X'), + (0x112B0, 'V'), + (0x112EB, 'X'), + (0x112F0, 'V'), + (0x112FA, 'X'), + (0x11300, 'V'), + (0x11304, 'X'), + (0x11305, 'V'), + (0x1130D, 'X'), + (0x1130F, 'V'), + (0x11311, 'X'), + (0x11313, 'V'), + (0x11329, 'X'), + (0x1132A, 'V'), + (0x11331, 'X'), + (0x11332, 'V'), + (0x11334, 'X'), + (0x11335, 'V'), + (0x1133A, 'X'), + (0x1133B, 'V'), + (0x11345, 'X'), + (0x11347, 'V'), + (0x11349, 'X'), + (0x1134B, 'V'), + (0x1134E, 'X'), + (0x11350, 'V'), + (0x11351, 'X'), + (0x11357, 'V'), + (0x11358, 'X'), + (0x1135D, 'V'), + (0x11364, 'X'), + (0x11366, 'V'), + (0x1136D, 'X'), + (0x11370, 'V'), + (0x11375, 'X'), + (0x11400, 'V'), + (0x1145A, 'X'), + (0x1145B, 'V'), + (0x1145C, 'X'), + (0x1145D, 'V'), + ] + +def _seg_56(): + return [ + (0x1145F, 'X'), + (0x11480, 'V'), + (0x114C8, 'X'), + (0x114D0, 'V'), + (0x114DA, 'X'), + (0x11580, 'V'), + (0x115B6, 'X'), + (0x115B8, 'V'), + (0x115DE, 'X'), + (0x11600, 'V'), + (0x11645, 'X'), + (0x11650, 'V'), + (0x1165A, 'X'), + (0x11660, 'V'), + (0x1166D, 'X'), + (0x11680, 'V'), + (0x116B8, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x11700, 'V'), + (0x1171B, 'X'), + (0x1171D, 'V'), + (0x1172C, 'X'), + (0x11730, 'V'), + (0x11740, 'X'), + (0x11800, 'V'), + (0x1183C, 'X'), + (0x118A0, 'M', u'𑣀'), + (0x118A1, 'M', u'𑣁'), + (0x118A2, 'M', u'𑣂'), + (0x118A3, 'M', u'𑣃'), + (0x118A4, 'M', u'𑣄'), + (0x118A5, 'M', u'𑣅'), + (0x118A6, 'M', u'𑣆'), + (0x118A7, 'M', u'𑣇'), + (0x118A8, 'M', u'𑣈'), + (0x118A9, 'M', u'𑣉'), + (0x118AA, 'M', u'𑣊'), + (0x118AB, 'M', u'𑣋'), + (0x118AC, 'M', u'𑣌'), + (0x118AD, 'M', u'𑣍'), + (0x118AE, 'M', u'𑣎'), + (0x118AF, 'M', u'𑣏'), + (0x118B0, 'M', u'𑣐'), + (0x118B1, 'M', u'𑣑'), + (0x118B2, 'M', u'𑣒'), + (0x118B3, 'M', u'𑣓'), + (0x118B4, 'M', u'𑣔'), + (0x118B5, 'M', u'𑣕'), + (0x118B6, 'M', u'𑣖'), + (0x118B7, 'M', u'𑣗'), + (0x118B8, 'M', u'𑣘'), + (0x118B9, 'M', u'𑣙'), + (0x118BA, 'M', u'𑣚'), + (0x118BB, 'M', u'𑣛'), + (0x118BC, 'M', u'𑣜'), + (0x118BD, 'M', u'𑣝'), + (0x118BE, 'M', u'𑣞'), + (0x118BF, 'M', u'𑣟'), + (0x118C0, 'V'), + (0x118F3, 'X'), + (0x118FF, 'V'), + (0x11900, 'X'), + (0x11A00, 'V'), + (0x11A48, 'X'), + (0x11A50, 'V'), + (0x11A84, 'X'), + 
(0x11A86, 'V'), + (0x11AA3, 'X'), + (0x11AC0, 'V'), + (0x11AF9, 'X'), + (0x11C00, 'V'), + (0x11C09, 'X'), + (0x11C0A, 'V'), + (0x11C37, 'X'), + (0x11C38, 'V'), + (0x11C46, 'X'), + (0x11C50, 'V'), + (0x11C6D, 'X'), + (0x11C70, 'V'), + (0x11C90, 'X'), + (0x11C92, 'V'), + (0x11CA8, 'X'), + (0x11CA9, 'V'), + (0x11CB7, 'X'), + (0x11D00, 'V'), + (0x11D07, 'X'), + (0x11D08, 'V'), + (0x11D0A, 'X'), + (0x11D0B, 'V'), + (0x11D37, 'X'), + (0x11D3A, 'V'), + (0x11D3B, 'X'), + (0x11D3C, 'V'), + (0x11D3E, 'X'), + (0x11D3F, 'V'), + (0x11D48, 'X'), + (0x11D50, 'V'), + (0x11D5A, 'X'), + (0x11D60, 'V'), + ] + +def _seg_57(): + return [ + (0x11D66, 'X'), + (0x11D67, 'V'), + (0x11D69, 'X'), + (0x11D6A, 'V'), + (0x11D8F, 'X'), + (0x11D90, 'V'), + (0x11D92, 'X'), + (0x11D93, 'V'), + (0x11D99, 'X'), + (0x11DA0, 'V'), + (0x11DAA, 'X'), + (0x11EE0, 'V'), + (0x11EF9, 'X'), + (0x12000, 'V'), + (0x1239A, 'X'), + (0x12400, 'V'), + (0x1246F, 'X'), + (0x12470, 'V'), + (0x12475, 'X'), + (0x12480, 'V'), + (0x12544, 'X'), + (0x13000, 'V'), + (0x1342F, 'X'), + (0x14400, 'V'), + (0x14647, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16A40, 'V'), + (0x16A5F, 'X'), + (0x16A60, 'V'), + (0x16A6A, 'X'), + (0x16A6E, 'V'), + (0x16A70, 'X'), + (0x16AD0, 'V'), + (0x16AEE, 'X'), + (0x16AF0, 'V'), + (0x16AF6, 'X'), + (0x16B00, 'V'), + (0x16B46, 'X'), + (0x16B50, 'V'), + (0x16B5A, 'X'), + (0x16B5B, 'V'), + (0x16B62, 'X'), + (0x16B63, 'V'), + (0x16B78, 'X'), + (0x16B7D, 'V'), + (0x16B90, 'X'), + (0x16E60, 'V'), + (0x16E9B, 'X'), + (0x16F00, 'V'), + (0x16F45, 'X'), + (0x16F50, 'V'), + (0x16F7F, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x16FE0, 'V'), + (0x16FE2, 'X'), + (0x17000, 'V'), + (0x187F2, 'X'), + (0x18800, 'V'), + (0x18AF3, 'X'), + (0x1B000, 'V'), + (0x1B11F, 'X'), + (0x1B170, 'V'), + (0x1B2FC, 'X'), + (0x1BC00, 'V'), + (0x1BC6B, 'X'), + (0x1BC70, 'V'), + (0x1BC7D, 'X'), + (0x1BC80, 'V'), + (0x1BC89, 'X'), + (0x1BC90, 'V'), + (0x1BC9A, 'X'), + (0x1BC9C, 'V'), + (0x1BCA0, 'I'), + (0x1BCA4, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', u'𝅗𝅥'), + (0x1D15F, 'M', u'𝅘𝅥'), + (0x1D160, 'M', u'𝅘𝅥𝅮'), + (0x1D161, 'M', u'𝅘𝅥𝅯'), + (0x1D162, 'M', u'𝅘𝅥𝅰'), + (0x1D163, 'M', u'𝅘𝅥𝅱'), + (0x1D164, 'M', u'𝅘𝅥𝅲'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', u'𝆹𝅥'), + (0x1D1BC, 'M', u'𝆺𝅥'), + (0x1D1BD, 'M', u'𝆹𝅥𝅮'), + (0x1D1BE, 'M', u'𝆺𝅥𝅮'), + (0x1D1BF, 'M', u'𝆹𝅥𝅯'), + (0x1D1C0, 'M', u'𝆺𝅥𝅯'), + (0x1D1C1, 'V'), + (0x1D1E9, 'X'), + (0x1D200, 'V'), + ] + +def _seg_58(): + return [ + (0x1D246, 'X'), + (0x1D2E0, 'V'), + (0x1D2F4, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D379, 'X'), + (0x1D400, 'M', u'a'), + (0x1D401, 'M', u'b'), + (0x1D402, 'M', u'c'), + (0x1D403, 'M', u'd'), + (0x1D404, 'M', u'e'), + (0x1D405, 'M', u'f'), + (0x1D406, 'M', u'g'), + (0x1D407, 'M', u'h'), + (0x1D408, 'M', u'i'), + (0x1D409, 'M', u'j'), + (0x1D40A, 'M', u'k'), + (0x1D40B, 'M', u'l'), + (0x1D40C, 'M', u'm'), + (0x1D40D, 'M', u'n'), + (0x1D40E, 'M', u'o'), + (0x1D40F, 'M', u'p'), + (0x1D410, 'M', u'q'), + (0x1D411, 'M', u'r'), + (0x1D412, 'M', u's'), + (0x1D413, 'M', u't'), + (0x1D414, 'M', u'u'), + (0x1D415, 'M', u'v'), + (0x1D416, 'M', u'w'), + (0x1D417, 'M', u'x'), + (0x1D418, 'M', u'y'), + (0x1D419, 'M', u'z'), + (0x1D41A, 'M', u'a'), + (0x1D41B, 'M', u'b'), + (0x1D41C, 'M', u'c'), + (0x1D41D, 'M', u'd'), + (0x1D41E, 'M', u'e'), + (0x1D41F, 'M', u'f'), + (0x1D420, 'M', u'g'), + (0x1D421, 'M', u'h'), + (0x1D422, 'M', u'i'), + (0x1D423, 'M', u'j'), + 
(0x1D424, 'M', u'k'), + (0x1D425, 'M', u'l'), + (0x1D426, 'M', u'm'), + (0x1D427, 'M', u'n'), + (0x1D428, 'M', u'o'), + (0x1D429, 'M', u'p'), + (0x1D42A, 'M', u'q'), + (0x1D42B, 'M', u'r'), + (0x1D42C, 'M', u's'), + (0x1D42D, 'M', u't'), + (0x1D42E, 'M', u'u'), + (0x1D42F, 'M', u'v'), + (0x1D430, 'M', u'w'), + (0x1D431, 'M', u'x'), + (0x1D432, 'M', u'y'), + (0x1D433, 'M', u'z'), + (0x1D434, 'M', u'a'), + (0x1D435, 'M', u'b'), + (0x1D436, 'M', u'c'), + (0x1D437, 'M', u'd'), + (0x1D438, 'M', u'e'), + (0x1D439, 'M', u'f'), + (0x1D43A, 'M', u'g'), + (0x1D43B, 'M', u'h'), + (0x1D43C, 'M', u'i'), + (0x1D43D, 'M', u'j'), + (0x1D43E, 'M', u'k'), + (0x1D43F, 'M', u'l'), + (0x1D440, 'M', u'm'), + (0x1D441, 'M', u'n'), + (0x1D442, 'M', u'o'), + (0x1D443, 'M', u'p'), + (0x1D444, 'M', u'q'), + (0x1D445, 'M', u'r'), + (0x1D446, 'M', u's'), + (0x1D447, 'M', u't'), + (0x1D448, 'M', u'u'), + (0x1D449, 'M', u'v'), + (0x1D44A, 'M', u'w'), + (0x1D44B, 'M', u'x'), + (0x1D44C, 'M', u'y'), + (0x1D44D, 'M', u'z'), + (0x1D44E, 'M', u'a'), + (0x1D44F, 'M', u'b'), + (0x1D450, 'M', u'c'), + (0x1D451, 'M', u'd'), + (0x1D452, 'M', u'e'), + (0x1D453, 'M', u'f'), + (0x1D454, 'M', u'g'), + (0x1D455, 'X'), + (0x1D456, 'M', u'i'), + (0x1D457, 'M', u'j'), + (0x1D458, 'M', u'k'), + (0x1D459, 'M', u'l'), + (0x1D45A, 'M', u'm'), + (0x1D45B, 'M', u'n'), + (0x1D45C, 'M', u'o'), + ] + +def _seg_59(): + return [ + (0x1D45D, 'M', u'p'), + (0x1D45E, 'M', u'q'), + (0x1D45F, 'M', u'r'), + (0x1D460, 'M', u's'), + (0x1D461, 'M', u't'), + (0x1D462, 'M', u'u'), + (0x1D463, 'M', u'v'), + (0x1D464, 'M', u'w'), + (0x1D465, 'M', u'x'), + (0x1D466, 'M', u'y'), + (0x1D467, 'M', u'z'), + (0x1D468, 'M', u'a'), + (0x1D469, 'M', u'b'), + (0x1D46A, 'M', u'c'), + (0x1D46B, 'M', u'd'), + (0x1D46C, 'M', u'e'), + (0x1D46D, 'M', u'f'), + (0x1D46E, 'M', u'g'), + (0x1D46F, 'M', u'h'), + (0x1D470, 'M', u'i'), + (0x1D471, 'M', u'j'), + (0x1D472, 'M', u'k'), + (0x1D473, 'M', u'l'), + (0x1D474, 'M', u'm'), + (0x1D475, 'M', u'n'), + (0x1D476, 'M', u'o'), + (0x1D477, 'M', u'p'), + (0x1D478, 'M', u'q'), + (0x1D479, 'M', u'r'), + (0x1D47A, 'M', u's'), + (0x1D47B, 'M', u't'), + (0x1D47C, 'M', u'u'), + (0x1D47D, 'M', u'v'), + (0x1D47E, 'M', u'w'), + (0x1D47F, 'M', u'x'), + (0x1D480, 'M', u'y'), + (0x1D481, 'M', u'z'), + (0x1D482, 'M', u'a'), + (0x1D483, 'M', u'b'), + (0x1D484, 'M', u'c'), + (0x1D485, 'M', u'd'), + (0x1D486, 'M', u'e'), + (0x1D487, 'M', u'f'), + (0x1D488, 'M', u'g'), + (0x1D489, 'M', u'h'), + (0x1D48A, 'M', u'i'), + (0x1D48B, 'M', u'j'), + (0x1D48C, 'M', u'k'), + (0x1D48D, 'M', u'l'), + (0x1D48E, 'M', u'm'), + (0x1D48F, 'M', u'n'), + (0x1D490, 'M', u'o'), + (0x1D491, 'M', u'p'), + (0x1D492, 'M', u'q'), + (0x1D493, 'M', u'r'), + (0x1D494, 'M', u's'), + (0x1D495, 'M', u't'), + (0x1D496, 'M', u'u'), + (0x1D497, 'M', u'v'), + (0x1D498, 'M', u'w'), + (0x1D499, 'M', u'x'), + (0x1D49A, 'M', u'y'), + (0x1D49B, 'M', u'z'), + (0x1D49C, 'M', u'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', u'c'), + (0x1D49F, 'M', u'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', u'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', u'j'), + (0x1D4A6, 'M', u'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', u'n'), + (0x1D4AA, 'M', u'o'), + (0x1D4AB, 'M', u'p'), + (0x1D4AC, 'M', u'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', u's'), + (0x1D4AF, 'M', u't'), + (0x1D4B0, 'M', u'u'), + (0x1D4B1, 'M', u'v'), + (0x1D4B2, 'M', u'w'), + (0x1D4B3, 'M', u'x'), + (0x1D4B4, 'M', u'y'), + (0x1D4B5, 'M', u'z'), + (0x1D4B6, 'M', u'a'), + (0x1D4B7, 'M', u'b'), + (0x1D4B8, 'M', u'c'), + (0x1D4B9, 'M', u'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 
'M', u'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', u'h'), + (0x1D4BE, 'M', u'i'), + (0x1D4BF, 'M', u'j'), + (0x1D4C0, 'M', u'k'), + (0x1D4C1, 'M', u'l'), + (0x1D4C2, 'M', u'm'), + (0x1D4C3, 'M', u'n'), + ] + +def _seg_60(): + return [ + (0x1D4C4, 'X'), + (0x1D4C5, 'M', u'p'), + (0x1D4C6, 'M', u'q'), + (0x1D4C7, 'M', u'r'), + (0x1D4C8, 'M', u's'), + (0x1D4C9, 'M', u't'), + (0x1D4CA, 'M', u'u'), + (0x1D4CB, 'M', u'v'), + (0x1D4CC, 'M', u'w'), + (0x1D4CD, 'M', u'x'), + (0x1D4CE, 'M', u'y'), + (0x1D4CF, 'M', u'z'), + (0x1D4D0, 'M', u'a'), + (0x1D4D1, 'M', u'b'), + (0x1D4D2, 'M', u'c'), + (0x1D4D3, 'M', u'd'), + (0x1D4D4, 'M', u'e'), + (0x1D4D5, 'M', u'f'), + (0x1D4D6, 'M', u'g'), + (0x1D4D7, 'M', u'h'), + (0x1D4D8, 'M', u'i'), + (0x1D4D9, 'M', u'j'), + (0x1D4DA, 'M', u'k'), + (0x1D4DB, 'M', u'l'), + (0x1D4DC, 'M', u'm'), + (0x1D4DD, 'M', u'n'), + (0x1D4DE, 'M', u'o'), + (0x1D4DF, 'M', u'p'), + (0x1D4E0, 'M', u'q'), + (0x1D4E1, 'M', u'r'), + (0x1D4E2, 'M', u's'), + (0x1D4E3, 'M', u't'), + (0x1D4E4, 'M', u'u'), + (0x1D4E5, 'M', u'v'), + (0x1D4E6, 'M', u'w'), + (0x1D4E7, 'M', u'x'), + (0x1D4E8, 'M', u'y'), + (0x1D4E9, 'M', u'z'), + (0x1D4EA, 'M', u'a'), + (0x1D4EB, 'M', u'b'), + (0x1D4EC, 'M', u'c'), + (0x1D4ED, 'M', u'd'), + (0x1D4EE, 'M', u'e'), + (0x1D4EF, 'M', u'f'), + (0x1D4F0, 'M', u'g'), + (0x1D4F1, 'M', u'h'), + (0x1D4F2, 'M', u'i'), + (0x1D4F3, 'M', u'j'), + (0x1D4F4, 'M', u'k'), + (0x1D4F5, 'M', u'l'), + (0x1D4F6, 'M', u'm'), + (0x1D4F7, 'M', u'n'), + (0x1D4F8, 'M', u'o'), + (0x1D4F9, 'M', u'p'), + (0x1D4FA, 'M', u'q'), + (0x1D4FB, 'M', u'r'), + (0x1D4FC, 'M', u's'), + (0x1D4FD, 'M', u't'), + (0x1D4FE, 'M', u'u'), + (0x1D4FF, 'M', u'v'), + (0x1D500, 'M', u'w'), + (0x1D501, 'M', u'x'), + (0x1D502, 'M', u'y'), + (0x1D503, 'M', u'z'), + (0x1D504, 'M', u'a'), + (0x1D505, 'M', u'b'), + (0x1D506, 'X'), + (0x1D507, 'M', u'd'), + (0x1D508, 'M', u'e'), + (0x1D509, 'M', u'f'), + (0x1D50A, 'M', u'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', u'j'), + (0x1D50E, 'M', u'k'), + (0x1D50F, 'M', u'l'), + (0x1D510, 'M', u'm'), + (0x1D511, 'M', u'n'), + (0x1D512, 'M', u'o'), + (0x1D513, 'M', u'p'), + (0x1D514, 'M', u'q'), + (0x1D515, 'X'), + (0x1D516, 'M', u's'), + (0x1D517, 'M', u't'), + (0x1D518, 'M', u'u'), + (0x1D519, 'M', u'v'), + (0x1D51A, 'M', u'w'), + (0x1D51B, 'M', u'x'), + (0x1D51C, 'M', u'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', u'a'), + (0x1D51F, 'M', u'b'), + (0x1D520, 'M', u'c'), + (0x1D521, 'M', u'd'), + (0x1D522, 'M', u'e'), + (0x1D523, 'M', u'f'), + (0x1D524, 'M', u'g'), + (0x1D525, 'M', u'h'), + (0x1D526, 'M', u'i'), + (0x1D527, 'M', u'j'), + (0x1D528, 'M', u'k'), + ] + +def _seg_61(): + return [ + (0x1D529, 'M', u'l'), + (0x1D52A, 'M', u'm'), + (0x1D52B, 'M', u'n'), + (0x1D52C, 'M', u'o'), + (0x1D52D, 'M', u'p'), + (0x1D52E, 'M', u'q'), + (0x1D52F, 'M', u'r'), + (0x1D530, 'M', u's'), + (0x1D531, 'M', u't'), + (0x1D532, 'M', u'u'), + (0x1D533, 'M', u'v'), + (0x1D534, 'M', u'w'), + (0x1D535, 'M', u'x'), + (0x1D536, 'M', u'y'), + (0x1D537, 'M', u'z'), + (0x1D538, 'M', u'a'), + (0x1D539, 'M', u'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', u'd'), + (0x1D53C, 'M', u'e'), + (0x1D53D, 'M', u'f'), + (0x1D53E, 'M', u'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', u'i'), + (0x1D541, 'M', u'j'), + (0x1D542, 'M', u'k'), + (0x1D543, 'M', u'l'), + (0x1D544, 'M', u'm'), + (0x1D545, 'X'), + (0x1D546, 'M', u'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', u's'), + (0x1D54B, 'M', u't'), + (0x1D54C, 'M', u'u'), + (0x1D54D, 'M', u'v'), + (0x1D54E, 'M', u'w'), + (0x1D54F, 'M', u'x'), + (0x1D550, 'M', u'y'), + (0x1D551, 'X'), + (0x1D552, 
'M', u'a'), + (0x1D553, 'M', u'b'), + (0x1D554, 'M', u'c'), + (0x1D555, 'M', u'd'), + (0x1D556, 'M', u'e'), + (0x1D557, 'M', u'f'), + (0x1D558, 'M', u'g'), + (0x1D559, 'M', u'h'), + (0x1D55A, 'M', u'i'), + (0x1D55B, 'M', u'j'), + (0x1D55C, 'M', u'k'), + (0x1D55D, 'M', u'l'), + (0x1D55E, 'M', u'm'), + (0x1D55F, 'M', u'n'), + (0x1D560, 'M', u'o'), + (0x1D561, 'M', u'p'), + (0x1D562, 'M', u'q'), + (0x1D563, 'M', u'r'), + (0x1D564, 'M', u's'), + (0x1D565, 'M', u't'), + (0x1D566, 'M', u'u'), + (0x1D567, 'M', u'v'), + (0x1D568, 'M', u'w'), + (0x1D569, 'M', u'x'), + (0x1D56A, 'M', u'y'), + (0x1D56B, 'M', u'z'), + (0x1D56C, 'M', u'a'), + (0x1D56D, 'M', u'b'), + (0x1D56E, 'M', u'c'), + (0x1D56F, 'M', u'd'), + (0x1D570, 'M', u'e'), + (0x1D571, 'M', u'f'), + (0x1D572, 'M', u'g'), + (0x1D573, 'M', u'h'), + (0x1D574, 'M', u'i'), + (0x1D575, 'M', u'j'), + (0x1D576, 'M', u'k'), + (0x1D577, 'M', u'l'), + (0x1D578, 'M', u'm'), + (0x1D579, 'M', u'n'), + (0x1D57A, 'M', u'o'), + (0x1D57B, 'M', u'p'), + (0x1D57C, 'M', u'q'), + (0x1D57D, 'M', u'r'), + (0x1D57E, 'M', u's'), + (0x1D57F, 'M', u't'), + (0x1D580, 'M', u'u'), + (0x1D581, 'M', u'v'), + (0x1D582, 'M', u'w'), + (0x1D583, 'M', u'x'), + (0x1D584, 'M', u'y'), + (0x1D585, 'M', u'z'), + (0x1D586, 'M', u'a'), + (0x1D587, 'M', u'b'), + (0x1D588, 'M', u'c'), + (0x1D589, 'M', u'd'), + (0x1D58A, 'M', u'e'), + (0x1D58B, 'M', u'f'), + (0x1D58C, 'M', u'g'), + (0x1D58D, 'M', u'h'), + (0x1D58E, 'M', u'i'), + ] + +def _seg_62(): + return [ + (0x1D58F, 'M', u'j'), + (0x1D590, 'M', u'k'), + (0x1D591, 'M', u'l'), + (0x1D592, 'M', u'm'), + (0x1D593, 'M', u'n'), + (0x1D594, 'M', u'o'), + (0x1D595, 'M', u'p'), + (0x1D596, 'M', u'q'), + (0x1D597, 'M', u'r'), + (0x1D598, 'M', u's'), + (0x1D599, 'M', u't'), + (0x1D59A, 'M', u'u'), + (0x1D59B, 'M', u'v'), + (0x1D59C, 'M', u'w'), + (0x1D59D, 'M', u'x'), + (0x1D59E, 'M', u'y'), + (0x1D59F, 'M', u'z'), + (0x1D5A0, 'M', u'a'), + (0x1D5A1, 'M', u'b'), + (0x1D5A2, 'M', u'c'), + (0x1D5A3, 'M', u'd'), + (0x1D5A4, 'M', u'e'), + (0x1D5A5, 'M', u'f'), + (0x1D5A6, 'M', u'g'), + (0x1D5A7, 'M', u'h'), + (0x1D5A8, 'M', u'i'), + (0x1D5A9, 'M', u'j'), + (0x1D5AA, 'M', u'k'), + (0x1D5AB, 'M', u'l'), + (0x1D5AC, 'M', u'm'), + (0x1D5AD, 'M', u'n'), + (0x1D5AE, 'M', u'o'), + (0x1D5AF, 'M', u'p'), + (0x1D5B0, 'M', u'q'), + (0x1D5B1, 'M', u'r'), + (0x1D5B2, 'M', u's'), + (0x1D5B3, 'M', u't'), + (0x1D5B4, 'M', u'u'), + (0x1D5B5, 'M', u'v'), + (0x1D5B6, 'M', u'w'), + (0x1D5B7, 'M', u'x'), + (0x1D5B8, 'M', u'y'), + (0x1D5B9, 'M', u'z'), + (0x1D5BA, 'M', u'a'), + (0x1D5BB, 'M', u'b'), + (0x1D5BC, 'M', u'c'), + (0x1D5BD, 'M', u'd'), + (0x1D5BE, 'M', u'e'), + (0x1D5BF, 'M', u'f'), + (0x1D5C0, 'M', u'g'), + (0x1D5C1, 'M', u'h'), + (0x1D5C2, 'M', u'i'), + (0x1D5C3, 'M', u'j'), + (0x1D5C4, 'M', u'k'), + (0x1D5C5, 'M', u'l'), + (0x1D5C6, 'M', u'm'), + (0x1D5C7, 'M', u'n'), + (0x1D5C8, 'M', u'o'), + (0x1D5C9, 'M', u'p'), + (0x1D5CA, 'M', u'q'), + (0x1D5CB, 'M', u'r'), + (0x1D5CC, 'M', u's'), + (0x1D5CD, 'M', u't'), + (0x1D5CE, 'M', u'u'), + (0x1D5CF, 'M', u'v'), + (0x1D5D0, 'M', u'w'), + (0x1D5D1, 'M', u'x'), + (0x1D5D2, 'M', u'y'), + (0x1D5D3, 'M', u'z'), + (0x1D5D4, 'M', u'a'), + (0x1D5D5, 'M', u'b'), + (0x1D5D6, 'M', u'c'), + (0x1D5D7, 'M', u'd'), + (0x1D5D8, 'M', u'e'), + (0x1D5D9, 'M', u'f'), + (0x1D5DA, 'M', u'g'), + (0x1D5DB, 'M', u'h'), + (0x1D5DC, 'M', u'i'), + (0x1D5DD, 'M', u'j'), + (0x1D5DE, 'M', u'k'), + (0x1D5DF, 'M', u'l'), + (0x1D5E0, 'M', u'm'), + (0x1D5E1, 'M', u'n'), + (0x1D5E2, 'M', u'o'), + (0x1D5E3, 'M', u'p'), + (0x1D5E4, 'M', u'q'), + 
(0x1D5E5, 'M', u'r'), + (0x1D5E6, 'M', u's'), + (0x1D5E7, 'M', u't'), + (0x1D5E8, 'M', u'u'), + (0x1D5E9, 'M', u'v'), + (0x1D5EA, 'M', u'w'), + (0x1D5EB, 'M', u'x'), + (0x1D5EC, 'M', u'y'), + (0x1D5ED, 'M', u'z'), + (0x1D5EE, 'M', u'a'), + (0x1D5EF, 'M', u'b'), + (0x1D5F0, 'M', u'c'), + (0x1D5F1, 'M', u'd'), + (0x1D5F2, 'M', u'e'), + ] + +def _seg_63(): + return [ + (0x1D5F3, 'M', u'f'), + (0x1D5F4, 'M', u'g'), + (0x1D5F5, 'M', u'h'), + (0x1D5F6, 'M', u'i'), + (0x1D5F7, 'M', u'j'), + (0x1D5F8, 'M', u'k'), + (0x1D5F9, 'M', u'l'), + (0x1D5FA, 'M', u'm'), + (0x1D5FB, 'M', u'n'), + (0x1D5FC, 'M', u'o'), + (0x1D5FD, 'M', u'p'), + (0x1D5FE, 'M', u'q'), + (0x1D5FF, 'M', u'r'), + (0x1D600, 'M', u's'), + (0x1D601, 'M', u't'), + (0x1D602, 'M', u'u'), + (0x1D603, 'M', u'v'), + (0x1D604, 'M', u'w'), + (0x1D605, 'M', u'x'), + (0x1D606, 'M', u'y'), + (0x1D607, 'M', u'z'), + (0x1D608, 'M', u'a'), + (0x1D609, 'M', u'b'), + (0x1D60A, 'M', u'c'), + (0x1D60B, 'M', u'd'), + (0x1D60C, 'M', u'e'), + (0x1D60D, 'M', u'f'), + (0x1D60E, 'M', u'g'), + (0x1D60F, 'M', u'h'), + (0x1D610, 'M', u'i'), + (0x1D611, 'M', u'j'), + (0x1D612, 'M', u'k'), + (0x1D613, 'M', u'l'), + (0x1D614, 'M', u'm'), + (0x1D615, 'M', u'n'), + (0x1D616, 'M', u'o'), + (0x1D617, 'M', u'p'), + (0x1D618, 'M', u'q'), + (0x1D619, 'M', u'r'), + (0x1D61A, 'M', u's'), + (0x1D61B, 'M', u't'), + (0x1D61C, 'M', u'u'), + (0x1D61D, 'M', u'v'), + (0x1D61E, 'M', u'w'), + (0x1D61F, 'M', u'x'), + (0x1D620, 'M', u'y'), + (0x1D621, 'M', u'z'), + (0x1D622, 'M', u'a'), + (0x1D623, 'M', u'b'), + (0x1D624, 'M', u'c'), + (0x1D625, 'M', u'd'), + (0x1D626, 'M', u'e'), + (0x1D627, 'M', u'f'), + (0x1D628, 'M', u'g'), + (0x1D629, 'M', u'h'), + (0x1D62A, 'M', u'i'), + (0x1D62B, 'M', u'j'), + (0x1D62C, 'M', u'k'), + (0x1D62D, 'M', u'l'), + (0x1D62E, 'M', u'm'), + (0x1D62F, 'M', u'n'), + (0x1D630, 'M', u'o'), + (0x1D631, 'M', u'p'), + (0x1D632, 'M', u'q'), + (0x1D633, 'M', u'r'), + (0x1D634, 'M', u's'), + (0x1D635, 'M', u't'), + (0x1D636, 'M', u'u'), + (0x1D637, 'M', u'v'), + (0x1D638, 'M', u'w'), + (0x1D639, 'M', u'x'), + (0x1D63A, 'M', u'y'), + (0x1D63B, 'M', u'z'), + (0x1D63C, 'M', u'a'), + (0x1D63D, 'M', u'b'), + (0x1D63E, 'M', u'c'), + (0x1D63F, 'M', u'd'), + (0x1D640, 'M', u'e'), + (0x1D641, 'M', u'f'), + (0x1D642, 'M', u'g'), + (0x1D643, 'M', u'h'), + (0x1D644, 'M', u'i'), + (0x1D645, 'M', u'j'), + (0x1D646, 'M', u'k'), + (0x1D647, 'M', u'l'), + (0x1D648, 'M', u'm'), + (0x1D649, 'M', u'n'), + (0x1D64A, 'M', u'o'), + (0x1D64B, 'M', u'p'), + (0x1D64C, 'M', u'q'), + (0x1D64D, 'M', u'r'), + (0x1D64E, 'M', u's'), + (0x1D64F, 'M', u't'), + (0x1D650, 'M', u'u'), + (0x1D651, 'M', u'v'), + (0x1D652, 'M', u'w'), + (0x1D653, 'M', u'x'), + (0x1D654, 'M', u'y'), + (0x1D655, 'M', u'z'), + (0x1D656, 'M', u'a'), + ] + +def _seg_64(): + return [ + (0x1D657, 'M', u'b'), + (0x1D658, 'M', u'c'), + (0x1D659, 'M', u'd'), + (0x1D65A, 'M', u'e'), + (0x1D65B, 'M', u'f'), + (0x1D65C, 'M', u'g'), + (0x1D65D, 'M', u'h'), + (0x1D65E, 'M', u'i'), + (0x1D65F, 'M', u'j'), + (0x1D660, 'M', u'k'), + (0x1D661, 'M', u'l'), + (0x1D662, 'M', u'm'), + (0x1D663, 'M', u'n'), + (0x1D664, 'M', u'o'), + (0x1D665, 'M', u'p'), + (0x1D666, 'M', u'q'), + (0x1D667, 'M', u'r'), + (0x1D668, 'M', u's'), + (0x1D669, 'M', u't'), + (0x1D66A, 'M', u'u'), + (0x1D66B, 'M', u'v'), + (0x1D66C, 'M', u'w'), + (0x1D66D, 'M', u'x'), + (0x1D66E, 'M', u'y'), + (0x1D66F, 'M', u'z'), + (0x1D670, 'M', u'a'), + (0x1D671, 'M', u'b'), + (0x1D672, 'M', u'c'), + (0x1D673, 'M', u'd'), + (0x1D674, 'M', u'e'), + (0x1D675, 'M', u'f'), + 
(0x1D676, 'M', u'g'), + (0x1D677, 'M', u'h'), + (0x1D678, 'M', u'i'), + (0x1D679, 'M', u'j'), + (0x1D67A, 'M', u'k'), + (0x1D67B, 'M', u'l'), + (0x1D67C, 'M', u'm'), + (0x1D67D, 'M', u'n'), + (0x1D67E, 'M', u'o'), + (0x1D67F, 'M', u'p'), + (0x1D680, 'M', u'q'), + (0x1D681, 'M', u'r'), + (0x1D682, 'M', u's'), + (0x1D683, 'M', u't'), + (0x1D684, 'M', u'u'), + (0x1D685, 'M', u'v'), + (0x1D686, 'M', u'w'), + (0x1D687, 'M', u'x'), + (0x1D688, 'M', u'y'), + (0x1D689, 'M', u'z'), + (0x1D68A, 'M', u'a'), + (0x1D68B, 'M', u'b'), + (0x1D68C, 'M', u'c'), + (0x1D68D, 'M', u'd'), + (0x1D68E, 'M', u'e'), + (0x1D68F, 'M', u'f'), + (0x1D690, 'M', u'g'), + (0x1D691, 'M', u'h'), + (0x1D692, 'M', u'i'), + (0x1D693, 'M', u'j'), + (0x1D694, 'M', u'k'), + (0x1D695, 'M', u'l'), + (0x1D696, 'M', u'm'), + (0x1D697, 'M', u'n'), + (0x1D698, 'M', u'o'), + (0x1D699, 'M', u'p'), + (0x1D69A, 'M', u'q'), + (0x1D69B, 'M', u'r'), + (0x1D69C, 'M', u's'), + (0x1D69D, 'M', u't'), + (0x1D69E, 'M', u'u'), + (0x1D69F, 'M', u'v'), + (0x1D6A0, 'M', u'w'), + (0x1D6A1, 'M', u'x'), + (0x1D6A2, 'M', u'y'), + (0x1D6A3, 'M', u'z'), + (0x1D6A4, 'M', u'ı'), + (0x1D6A5, 'M', u'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', u'α'), + (0x1D6A9, 'M', u'β'), + (0x1D6AA, 'M', u'γ'), + (0x1D6AB, 'M', u'δ'), + (0x1D6AC, 'M', u'ε'), + (0x1D6AD, 'M', u'ζ'), + (0x1D6AE, 'M', u'η'), + (0x1D6AF, 'M', u'θ'), + (0x1D6B0, 'M', u'ι'), + (0x1D6B1, 'M', u'κ'), + (0x1D6B2, 'M', u'λ'), + (0x1D6B3, 'M', u'μ'), + (0x1D6B4, 'M', u'ν'), + (0x1D6B5, 'M', u'ξ'), + (0x1D6B6, 'M', u'ο'), + (0x1D6B7, 'M', u'π'), + (0x1D6B8, 'M', u'ρ'), + (0x1D6B9, 'M', u'θ'), + (0x1D6BA, 'M', u'σ'), + (0x1D6BB, 'M', u'τ'), + ] + +def _seg_65(): + return [ + (0x1D6BC, 'M', u'υ'), + (0x1D6BD, 'M', u'φ'), + (0x1D6BE, 'M', u'χ'), + (0x1D6BF, 'M', u'ψ'), + (0x1D6C0, 'M', u'ω'), + (0x1D6C1, 'M', u'∇'), + (0x1D6C2, 'M', u'α'), + (0x1D6C3, 'M', u'β'), + (0x1D6C4, 'M', u'γ'), + (0x1D6C5, 'M', u'δ'), + (0x1D6C6, 'M', u'ε'), + (0x1D6C7, 'M', u'ζ'), + (0x1D6C8, 'M', u'η'), + (0x1D6C9, 'M', u'θ'), + (0x1D6CA, 'M', u'ι'), + (0x1D6CB, 'M', u'κ'), + (0x1D6CC, 'M', u'λ'), + (0x1D6CD, 'M', u'μ'), + (0x1D6CE, 'M', u'ν'), + (0x1D6CF, 'M', u'ξ'), + (0x1D6D0, 'M', u'ο'), + (0x1D6D1, 'M', u'π'), + (0x1D6D2, 'M', u'ρ'), + (0x1D6D3, 'M', u'σ'), + (0x1D6D5, 'M', u'τ'), + (0x1D6D6, 'M', u'υ'), + (0x1D6D7, 'M', u'φ'), + (0x1D6D8, 'M', u'χ'), + (0x1D6D9, 'M', u'ψ'), + (0x1D6DA, 'M', u'ω'), + (0x1D6DB, 'M', u'∂'), + (0x1D6DC, 'M', u'ε'), + (0x1D6DD, 'M', u'θ'), + (0x1D6DE, 'M', u'κ'), + (0x1D6DF, 'M', u'φ'), + (0x1D6E0, 'M', u'ρ'), + (0x1D6E1, 'M', u'π'), + (0x1D6E2, 'M', u'α'), + (0x1D6E3, 'M', u'β'), + (0x1D6E4, 'M', u'γ'), + (0x1D6E5, 'M', u'δ'), + (0x1D6E6, 'M', u'ε'), + (0x1D6E7, 'M', u'ζ'), + (0x1D6E8, 'M', u'η'), + (0x1D6E9, 'M', u'θ'), + (0x1D6EA, 'M', u'ι'), + (0x1D6EB, 'M', u'κ'), + (0x1D6EC, 'M', u'λ'), + (0x1D6ED, 'M', u'μ'), + (0x1D6EE, 'M', u'ν'), + (0x1D6EF, 'M', u'ξ'), + (0x1D6F0, 'M', u'ο'), + (0x1D6F1, 'M', u'π'), + (0x1D6F2, 'M', u'ρ'), + (0x1D6F3, 'M', u'θ'), + (0x1D6F4, 'M', u'σ'), + (0x1D6F5, 'M', u'τ'), + (0x1D6F6, 'M', u'υ'), + (0x1D6F7, 'M', u'φ'), + (0x1D6F8, 'M', u'χ'), + (0x1D6F9, 'M', u'ψ'), + (0x1D6FA, 'M', u'ω'), + (0x1D6FB, 'M', u'∇'), + (0x1D6FC, 'M', u'α'), + (0x1D6FD, 'M', u'β'), + (0x1D6FE, 'M', u'γ'), + (0x1D6FF, 'M', u'δ'), + (0x1D700, 'M', u'ε'), + (0x1D701, 'M', u'ζ'), + (0x1D702, 'M', u'η'), + (0x1D703, 'M', u'θ'), + (0x1D704, 'M', u'ι'), + (0x1D705, 'M', u'κ'), + (0x1D706, 'M', u'λ'), + (0x1D707, 'M', u'μ'), + (0x1D708, 'M', u'ν'), + (0x1D709, 'M', u'ξ'), + (0x1D70A, 'M', u'ο'), + 
(0x1D70B, 'M', u'π'), + (0x1D70C, 'M', u'ρ'), + (0x1D70D, 'M', u'σ'), + (0x1D70F, 'M', u'τ'), + (0x1D710, 'M', u'υ'), + (0x1D711, 'M', u'φ'), + (0x1D712, 'M', u'χ'), + (0x1D713, 'M', u'ψ'), + (0x1D714, 'M', u'ω'), + (0x1D715, 'M', u'∂'), + (0x1D716, 'M', u'ε'), + (0x1D717, 'M', u'θ'), + (0x1D718, 'M', u'κ'), + (0x1D719, 'M', u'φ'), + (0x1D71A, 'M', u'ρ'), + (0x1D71B, 'M', u'π'), + (0x1D71C, 'M', u'α'), + (0x1D71D, 'M', u'β'), + (0x1D71E, 'M', u'γ'), + (0x1D71F, 'M', u'δ'), + (0x1D720, 'M', u'ε'), + (0x1D721, 'M', u'ζ'), + ] + +def _seg_66(): + return [ + (0x1D722, 'M', u'η'), + (0x1D723, 'M', u'θ'), + (0x1D724, 'M', u'ι'), + (0x1D725, 'M', u'κ'), + (0x1D726, 'M', u'λ'), + (0x1D727, 'M', u'μ'), + (0x1D728, 'M', u'ν'), + (0x1D729, 'M', u'ξ'), + (0x1D72A, 'M', u'ο'), + (0x1D72B, 'M', u'π'), + (0x1D72C, 'M', u'ρ'), + (0x1D72D, 'M', u'θ'), + (0x1D72E, 'M', u'σ'), + (0x1D72F, 'M', u'τ'), + (0x1D730, 'M', u'υ'), + (0x1D731, 'M', u'φ'), + (0x1D732, 'M', u'χ'), + (0x1D733, 'M', u'ψ'), + (0x1D734, 'M', u'ω'), + (0x1D735, 'M', u'∇'), + (0x1D736, 'M', u'α'), + (0x1D737, 'M', u'β'), + (0x1D738, 'M', u'γ'), + (0x1D739, 'M', u'δ'), + (0x1D73A, 'M', u'ε'), + (0x1D73B, 'M', u'ζ'), + (0x1D73C, 'M', u'η'), + (0x1D73D, 'M', u'θ'), + (0x1D73E, 'M', u'ι'), + (0x1D73F, 'M', u'κ'), + (0x1D740, 'M', u'λ'), + (0x1D741, 'M', u'μ'), + (0x1D742, 'M', u'ν'), + (0x1D743, 'M', u'ξ'), + (0x1D744, 'M', u'ο'), + (0x1D745, 'M', u'π'), + (0x1D746, 'M', u'ρ'), + (0x1D747, 'M', u'σ'), + (0x1D749, 'M', u'τ'), + (0x1D74A, 'M', u'υ'), + (0x1D74B, 'M', u'φ'), + (0x1D74C, 'M', u'χ'), + (0x1D74D, 'M', u'ψ'), + (0x1D74E, 'M', u'ω'), + (0x1D74F, 'M', u'∂'), + (0x1D750, 'M', u'ε'), + (0x1D751, 'M', u'θ'), + (0x1D752, 'M', u'κ'), + (0x1D753, 'M', u'φ'), + (0x1D754, 'M', u'ρ'), + (0x1D755, 'M', u'π'), + (0x1D756, 'M', u'α'), + (0x1D757, 'M', u'β'), + (0x1D758, 'M', u'γ'), + (0x1D759, 'M', u'δ'), + (0x1D75A, 'M', u'ε'), + (0x1D75B, 'M', u'ζ'), + (0x1D75C, 'M', u'η'), + (0x1D75D, 'M', u'θ'), + (0x1D75E, 'M', u'ι'), + (0x1D75F, 'M', u'κ'), + (0x1D760, 'M', u'λ'), + (0x1D761, 'M', u'μ'), + (0x1D762, 'M', u'ν'), + (0x1D763, 'M', u'ξ'), + (0x1D764, 'M', u'ο'), + (0x1D765, 'M', u'π'), + (0x1D766, 'M', u'ρ'), + (0x1D767, 'M', u'θ'), + (0x1D768, 'M', u'σ'), + (0x1D769, 'M', u'τ'), + (0x1D76A, 'M', u'υ'), + (0x1D76B, 'M', u'φ'), + (0x1D76C, 'M', u'χ'), + (0x1D76D, 'M', u'ψ'), + (0x1D76E, 'M', u'ω'), + (0x1D76F, 'M', u'∇'), + (0x1D770, 'M', u'α'), + (0x1D771, 'M', u'β'), + (0x1D772, 'M', u'γ'), + (0x1D773, 'M', u'δ'), + (0x1D774, 'M', u'ε'), + (0x1D775, 'M', u'ζ'), + (0x1D776, 'M', u'η'), + (0x1D777, 'M', u'θ'), + (0x1D778, 'M', u'ι'), + (0x1D779, 'M', u'κ'), + (0x1D77A, 'M', u'λ'), + (0x1D77B, 'M', u'μ'), + (0x1D77C, 'M', u'ν'), + (0x1D77D, 'M', u'ξ'), + (0x1D77E, 'M', u'ο'), + (0x1D77F, 'M', u'π'), + (0x1D780, 'M', u'ρ'), + (0x1D781, 'M', u'σ'), + (0x1D783, 'M', u'τ'), + (0x1D784, 'M', u'υ'), + (0x1D785, 'M', u'φ'), + (0x1D786, 'M', u'χ'), + (0x1D787, 'M', u'ψ'), + ] + +def _seg_67(): + return [ + (0x1D788, 'M', u'ω'), + (0x1D789, 'M', u'∂'), + (0x1D78A, 'M', u'ε'), + (0x1D78B, 'M', u'θ'), + (0x1D78C, 'M', u'κ'), + (0x1D78D, 'M', u'φ'), + (0x1D78E, 'M', u'ρ'), + (0x1D78F, 'M', u'π'), + (0x1D790, 'M', u'α'), + (0x1D791, 'M', u'β'), + (0x1D792, 'M', u'γ'), + (0x1D793, 'M', u'δ'), + (0x1D794, 'M', u'ε'), + (0x1D795, 'M', u'ζ'), + (0x1D796, 'M', u'η'), + (0x1D797, 'M', u'θ'), + (0x1D798, 'M', u'ι'), + (0x1D799, 'M', u'κ'), + (0x1D79A, 'M', u'λ'), + (0x1D79B, 'M', u'μ'), + (0x1D79C, 'M', u'ν'), + (0x1D79D, 'M', u'ξ'), + (0x1D79E, 'M', u'ο'), + 
(0x1D79F, 'M', u'π'), + (0x1D7A0, 'M', u'ρ'), + (0x1D7A1, 'M', u'θ'), + (0x1D7A2, 'M', u'σ'), + (0x1D7A3, 'M', u'τ'), + (0x1D7A4, 'M', u'υ'), + (0x1D7A5, 'M', u'φ'), + (0x1D7A6, 'M', u'χ'), + (0x1D7A7, 'M', u'ψ'), + (0x1D7A8, 'M', u'ω'), + (0x1D7A9, 'M', u'∇'), + (0x1D7AA, 'M', u'α'), + (0x1D7AB, 'M', u'β'), + (0x1D7AC, 'M', u'γ'), + (0x1D7AD, 'M', u'δ'), + (0x1D7AE, 'M', u'ε'), + (0x1D7AF, 'M', u'ζ'), + (0x1D7B0, 'M', u'η'), + (0x1D7B1, 'M', u'θ'), + (0x1D7B2, 'M', u'ι'), + (0x1D7B3, 'M', u'κ'), + (0x1D7B4, 'M', u'λ'), + (0x1D7B5, 'M', u'μ'), + (0x1D7B6, 'M', u'ν'), + (0x1D7B7, 'M', u'ξ'), + (0x1D7B8, 'M', u'ο'), + (0x1D7B9, 'M', u'π'), + (0x1D7BA, 'M', u'ρ'), + (0x1D7BB, 'M', u'σ'), + (0x1D7BD, 'M', u'τ'), + (0x1D7BE, 'M', u'υ'), + (0x1D7BF, 'M', u'φ'), + (0x1D7C0, 'M', u'χ'), + (0x1D7C1, 'M', u'ψ'), + (0x1D7C2, 'M', u'ω'), + (0x1D7C3, 'M', u'∂'), + (0x1D7C4, 'M', u'ε'), + (0x1D7C5, 'M', u'θ'), + (0x1D7C6, 'M', u'κ'), + (0x1D7C7, 'M', u'φ'), + (0x1D7C8, 'M', u'ρ'), + (0x1D7C9, 'M', u'π'), + (0x1D7CA, 'M', u'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', u'0'), + (0x1D7CF, 'M', u'1'), + (0x1D7D0, 'M', u'2'), + (0x1D7D1, 'M', u'3'), + (0x1D7D2, 'M', u'4'), + (0x1D7D3, 'M', u'5'), + (0x1D7D4, 'M', u'6'), + (0x1D7D5, 'M', u'7'), + (0x1D7D6, 'M', u'8'), + (0x1D7D7, 'M', u'9'), + (0x1D7D8, 'M', u'0'), + (0x1D7D9, 'M', u'1'), + (0x1D7DA, 'M', u'2'), + (0x1D7DB, 'M', u'3'), + (0x1D7DC, 'M', u'4'), + (0x1D7DD, 'M', u'5'), + (0x1D7DE, 'M', u'6'), + (0x1D7DF, 'M', u'7'), + (0x1D7E0, 'M', u'8'), + (0x1D7E1, 'M', u'9'), + (0x1D7E2, 'M', u'0'), + (0x1D7E3, 'M', u'1'), + (0x1D7E4, 'M', u'2'), + (0x1D7E5, 'M', u'3'), + (0x1D7E6, 'M', u'4'), + (0x1D7E7, 'M', u'5'), + (0x1D7E8, 'M', u'6'), + (0x1D7E9, 'M', u'7'), + (0x1D7EA, 'M', u'8'), + (0x1D7EB, 'M', u'9'), + (0x1D7EC, 'M', u'0'), + (0x1D7ED, 'M', u'1'), + (0x1D7EE, 'M', u'2'), + ] + +def _seg_68(): + return [ + (0x1D7EF, 'M', u'3'), + (0x1D7F0, 'M', u'4'), + (0x1D7F1, 'M', u'5'), + (0x1D7F2, 'M', u'6'), + (0x1D7F3, 'M', u'7'), + (0x1D7F4, 'M', u'8'), + (0x1D7F5, 'M', u'9'), + (0x1D7F6, 'M', u'0'), + (0x1D7F7, 'M', u'1'), + (0x1D7F8, 'M', u'2'), + (0x1D7F9, 'M', u'3'), + (0x1D7FA, 'M', u'4'), + (0x1D7FB, 'M', u'5'), + (0x1D7FC, 'M', u'6'), + (0x1D7FD, 'M', u'7'), + (0x1D7FE, 'M', u'8'), + (0x1D7FF, 'M', u'9'), + (0x1D800, 'V'), + (0x1DA8C, 'X'), + (0x1DA9B, 'V'), + (0x1DAA0, 'X'), + (0x1DAA1, 'V'), + (0x1DAB0, 'X'), + (0x1E000, 'V'), + (0x1E007, 'X'), + (0x1E008, 'V'), + (0x1E019, 'X'), + (0x1E01B, 'V'), + (0x1E022, 'X'), + (0x1E023, 'V'), + (0x1E025, 'X'), + (0x1E026, 'V'), + (0x1E02B, 'X'), + (0x1E800, 'V'), + (0x1E8C5, 'X'), + (0x1E8C7, 'V'), + (0x1E8D7, 'X'), + (0x1E900, 'M', u'𞤢'), + (0x1E901, 'M', u'𞤣'), + (0x1E902, 'M', u'𞤤'), + (0x1E903, 'M', u'𞤥'), + (0x1E904, 'M', u'𞤦'), + (0x1E905, 'M', u'𞤧'), + (0x1E906, 'M', u'𞤨'), + (0x1E907, 'M', u'𞤩'), + (0x1E908, 'M', u'𞤪'), + (0x1E909, 'M', u'𞤫'), + (0x1E90A, 'M', u'𞤬'), + (0x1E90B, 'M', u'𞤭'), + (0x1E90C, 'M', u'𞤮'), + (0x1E90D, 'M', u'𞤯'), + (0x1E90E, 'M', u'𞤰'), + (0x1E90F, 'M', u'𞤱'), + (0x1E910, 'M', u'𞤲'), + (0x1E911, 'M', u'𞤳'), + (0x1E912, 'M', u'𞤴'), + (0x1E913, 'M', u'𞤵'), + (0x1E914, 'M', u'𞤶'), + (0x1E915, 'M', u'𞤷'), + (0x1E916, 'M', u'𞤸'), + (0x1E917, 'M', u'𞤹'), + (0x1E918, 'M', u'𞤺'), + (0x1E919, 'M', u'𞤻'), + (0x1E91A, 'M', u'𞤼'), + (0x1E91B, 'M', u'𞤽'), + (0x1E91C, 'M', u'𞤾'), + (0x1E91D, 'M', u'𞤿'), + (0x1E91E, 'M', u'𞥀'), + (0x1E91F, 'M', u'𞥁'), + (0x1E920, 'M', u'𞥂'), + (0x1E921, 'M', u'𞥃'), + (0x1E922, 'V'), + (0x1E94B, 'X'), + (0x1E950, 'V'), + (0x1E95A, 'X'), + (0x1E95E, 'V'), + 
(0x1E960, 'X'), + (0x1EC71, 'V'), + (0x1ECB5, 'X'), + (0x1EE00, 'M', u'ا'), + (0x1EE01, 'M', u'ب'), + (0x1EE02, 'M', u'ج'), + (0x1EE03, 'M', u'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', u'و'), + (0x1EE06, 'M', u'ز'), + (0x1EE07, 'M', u'ح'), + (0x1EE08, 'M', u'ط'), + (0x1EE09, 'M', u'ي'), + (0x1EE0A, 'M', u'ك'), + (0x1EE0B, 'M', u'ل'), + (0x1EE0C, 'M', u'م'), + (0x1EE0D, 'M', u'ن'), + (0x1EE0E, 'M', u'س'), + (0x1EE0F, 'M', u'ع'), + (0x1EE10, 'M', u'ف'), + (0x1EE11, 'M', u'ص'), + (0x1EE12, 'M', u'ق'), + (0x1EE13, 'M', u'ر'), + (0x1EE14, 'M', u'ش'), + ] + +def _seg_69(): + return [ + (0x1EE15, 'M', u'ت'), + (0x1EE16, 'M', u'ث'), + (0x1EE17, 'M', u'خ'), + (0x1EE18, 'M', u'ذ'), + (0x1EE19, 'M', u'ض'), + (0x1EE1A, 'M', u'ظ'), + (0x1EE1B, 'M', u'غ'), + (0x1EE1C, 'M', u'ٮ'), + (0x1EE1D, 'M', u'ں'), + (0x1EE1E, 'M', u'ڡ'), + (0x1EE1F, 'M', u'ٯ'), + (0x1EE20, 'X'), + (0x1EE21, 'M', u'ب'), + (0x1EE22, 'M', u'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', u'ه'), + (0x1EE25, 'X'), + (0x1EE27, 'M', u'ح'), + (0x1EE28, 'X'), + (0x1EE29, 'M', u'ي'), + (0x1EE2A, 'M', u'ك'), + (0x1EE2B, 'M', u'ل'), + (0x1EE2C, 'M', u'م'), + (0x1EE2D, 'M', u'ن'), + (0x1EE2E, 'M', u'س'), + (0x1EE2F, 'M', u'ع'), + (0x1EE30, 'M', u'ف'), + (0x1EE31, 'M', u'ص'), + (0x1EE32, 'M', u'ق'), + (0x1EE33, 'X'), + (0x1EE34, 'M', u'ش'), + (0x1EE35, 'M', u'ت'), + (0x1EE36, 'M', u'ث'), + (0x1EE37, 'M', u'خ'), + (0x1EE38, 'X'), + (0x1EE39, 'M', u'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', u'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', u'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', u'ح'), + (0x1EE48, 'X'), + (0x1EE49, 'M', u'ي'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', u'ل'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', u'ن'), + (0x1EE4E, 'M', u'س'), + (0x1EE4F, 'M', u'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', u'ص'), + (0x1EE52, 'M', u'ق'), + (0x1EE53, 'X'), + (0x1EE54, 'M', u'ش'), + (0x1EE55, 'X'), + (0x1EE57, 'M', u'خ'), + (0x1EE58, 'X'), + (0x1EE59, 'M', u'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', u'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', u'ں'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', u'ٯ'), + (0x1EE60, 'X'), + (0x1EE61, 'M', u'ب'), + (0x1EE62, 'M', u'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', u'ه'), + (0x1EE65, 'X'), + (0x1EE67, 'M', u'ح'), + (0x1EE68, 'M', u'ط'), + (0x1EE69, 'M', u'ي'), + (0x1EE6A, 'M', u'ك'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', u'م'), + (0x1EE6D, 'M', u'ن'), + (0x1EE6E, 'M', u'س'), + (0x1EE6F, 'M', u'ع'), + (0x1EE70, 'M', u'ف'), + (0x1EE71, 'M', u'ص'), + (0x1EE72, 'M', u'ق'), + (0x1EE73, 'X'), + (0x1EE74, 'M', u'ش'), + (0x1EE75, 'M', u'ت'), + (0x1EE76, 'M', u'ث'), + (0x1EE77, 'M', u'خ'), + (0x1EE78, 'X'), + (0x1EE79, 'M', u'ض'), + (0x1EE7A, 'M', u'ظ'), + (0x1EE7B, 'M', u'غ'), + (0x1EE7C, 'M', u'ٮ'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', u'ڡ'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', u'ا'), + (0x1EE81, 'M', u'ب'), + (0x1EE82, 'M', u'ج'), + (0x1EE83, 'M', u'د'), + ] + +def _seg_70(): + return [ + (0x1EE84, 'M', u'ه'), + (0x1EE85, 'M', u'و'), + (0x1EE86, 'M', u'ز'), + (0x1EE87, 'M', u'ح'), + (0x1EE88, 'M', u'ط'), + (0x1EE89, 'M', u'ي'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', u'ل'), + (0x1EE8C, 'M', u'م'), + (0x1EE8D, 'M', u'ن'), + (0x1EE8E, 'M', u'س'), + (0x1EE8F, 'M', u'ع'), + (0x1EE90, 'M', u'ف'), + (0x1EE91, 'M', u'ص'), + (0x1EE92, 'M', u'ق'), + (0x1EE93, 'M', u'ر'), + (0x1EE94, 'M', u'ش'), + (0x1EE95, 'M', u'ت'), + (0x1EE96, 'M', u'ث'), + (0x1EE97, 'M', u'خ'), + (0x1EE98, 'M', u'ذ'), + (0x1EE99, 'M', u'ض'), + (0x1EE9A, 'M', u'ظ'), + (0x1EE9B, 'M', u'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', u'ب'), + (0x1EEA2, 'M', u'ج'), + (0x1EEA3, 'M', u'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', 
u'و'), + (0x1EEA6, 'M', u'ز'), + (0x1EEA7, 'M', u'ح'), + (0x1EEA8, 'M', u'ط'), + (0x1EEA9, 'M', u'ي'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', u'ل'), + (0x1EEAC, 'M', u'م'), + (0x1EEAD, 'M', u'ن'), + (0x1EEAE, 'M', u'س'), + (0x1EEAF, 'M', u'ع'), + (0x1EEB0, 'M', u'ف'), + (0x1EEB1, 'M', u'ص'), + (0x1EEB2, 'M', u'ق'), + (0x1EEB3, 'M', u'ر'), + (0x1EEB4, 'M', u'ش'), + (0x1EEB5, 'M', u'ت'), + (0x1EEB6, 'M', u'ث'), + (0x1EEB7, 'M', u'خ'), + (0x1EEB8, 'M', u'ذ'), + (0x1EEB9, 'M', u'ض'), + (0x1EEBA, 'M', u'ظ'), + (0x1EEBB, 'M', u'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0C0, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0F6, 'X'), + (0x1F101, '3', u'0,'), + (0x1F102, '3', u'1,'), + (0x1F103, '3', u'2,'), + (0x1F104, '3', u'3,'), + (0x1F105, '3', u'4,'), + (0x1F106, '3', u'5,'), + (0x1F107, '3', u'6,'), + (0x1F108, '3', u'7,'), + (0x1F109, '3', u'8,'), + (0x1F10A, '3', u'9,'), + (0x1F10B, 'V'), + (0x1F10D, 'X'), + (0x1F110, '3', u'(a)'), + (0x1F111, '3', u'(b)'), + (0x1F112, '3', u'(c)'), + (0x1F113, '3', u'(d)'), + (0x1F114, '3', u'(e)'), + (0x1F115, '3', u'(f)'), + (0x1F116, '3', u'(g)'), + (0x1F117, '3', u'(h)'), + (0x1F118, '3', u'(i)'), + (0x1F119, '3', u'(j)'), + (0x1F11A, '3', u'(k)'), + (0x1F11B, '3', u'(l)'), + (0x1F11C, '3', u'(m)'), + (0x1F11D, '3', u'(n)'), + (0x1F11E, '3', u'(o)'), + (0x1F11F, '3', u'(p)'), + (0x1F120, '3', u'(q)'), + (0x1F121, '3', u'(r)'), + (0x1F122, '3', u'(s)'), + (0x1F123, '3', u'(t)'), + (0x1F124, '3', u'(u)'), + ] + +def _seg_71(): + return [ + (0x1F125, '3', u'(v)'), + (0x1F126, '3', u'(w)'), + (0x1F127, '3', u'(x)'), + (0x1F128, '3', u'(y)'), + (0x1F129, '3', u'(z)'), + (0x1F12A, 'M', u'〔s〕'), + (0x1F12B, 'M', u'c'), + (0x1F12C, 'M', u'r'), + (0x1F12D, 'M', u'cd'), + (0x1F12E, 'M', u'wz'), + (0x1F12F, 'V'), + (0x1F130, 'M', u'a'), + (0x1F131, 'M', u'b'), + (0x1F132, 'M', u'c'), + (0x1F133, 'M', u'd'), + (0x1F134, 'M', u'e'), + (0x1F135, 'M', u'f'), + (0x1F136, 'M', u'g'), + (0x1F137, 'M', u'h'), + (0x1F138, 'M', u'i'), + (0x1F139, 'M', u'j'), + (0x1F13A, 'M', u'k'), + (0x1F13B, 'M', u'l'), + (0x1F13C, 'M', u'm'), + (0x1F13D, 'M', u'n'), + (0x1F13E, 'M', u'o'), + (0x1F13F, 'M', u'p'), + (0x1F140, 'M', u'q'), + (0x1F141, 'M', u'r'), + (0x1F142, 'M', u's'), + (0x1F143, 'M', u't'), + (0x1F144, 'M', u'u'), + (0x1F145, 'M', u'v'), + (0x1F146, 'M', u'w'), + (0x1F147, 'M', u'x'), + (0x1F148, 'M', u'y'), + (0x1F149, 'M', u'z'), + (0x1F14A, 'M', u'hv'), + (0x1F14B, 'M', u'mv'), + (0x1F14C, 'M', u'sd'), + (0x1F14D, 'M', u'ss'), + (0x1F14E, 'M', u'ppv'), + (0x1F14F, 'M', u'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', u'mc'), + (0x1F16B, 'M', u'md'), + (0x1F16C, 'X'), + (0x1F170, 'V'), + (0x1F190, 'M', u'dj'), + (0x1F191, 'V'), + (0x1F1AD, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', u'ほか'), + (0x1F201, 'M', u'ココ'), + (0x1F202, 'M', u'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', u'手'), + (0x1F211, 'M', u'字'), + (0x1F212, 'M', u'双'), + (0x1F213, 'M', u'デ'), + (0x1F214, 'M', u'二'), + (0x1F215, 'M', u'多'), + (0x1F216, 'M', u'解'), + (0x1F217, 'M', u'天'), + (0x1F218, 'M', u'交'), + (0x1F219, 'M', u'映'), + (0x1F21A, 'M', u'無'), + (0x1F21B, 'M', u'料'), + (0x1F21C, 'M', u'前'), + (0x1F21D, 'M', u'後'), + (0x1F21E, 'M', u'再'), + (0x1F21F, 'M', u'新'), + (0x1F220, 'M', u'初'), + (0x1F221, 'M', u'終'), + (0x1F222, 'M', u'生'), + (0x1F223, 'M', u'販'), + (0x1F224, 'M', u'声'), + (0x1F225, 'M', u'吹'), + (0x1F226, 'M', u'演'), + (0x1F227, 'M', 
u'投'), + (0x1F228, 'M', u'捕'), + (0x1F229, 'M', u'一'), + (0x1F22A, 'M', u'三'), + (0x1F22B, 'M', u'遊'), + (0x1F22C, 'M', u'左'), + (0x1F22D, 'M', u'中'), + (0x1F22E, 'M', u'右'), + (0x1F22F, 'M', u'指'), + (0x1F230, 'M', u'走'), + (0x1F231, 'M', u'打'), + (0x1F232, 'M', u'禁'), + (0x1F233, 'M', u'空'), + (0x1F234, 'M', u'合'), + (0x1F235, 'M', u'満'), + (0x1F236, 'M', u'有'), + (0x1F237, 'M', u'月'), + (0x1F238, 'M', u'申'), + (0x1F239, 'M', u'割'), + (0x1F23A, 'M', u'営'), + (0x1F23B, 'M', u'配'), + ] + +def _seg_72(): + return [ + (0x1F23C, 'X'), + (0x1F240, 'M', u'〔本〕'), + (0x1F241, 'M', u'〔三〕'), + (0x1F242, 'M', u'〔二〕'), + (0x1F243, 'M', u'〔安〕'), + (0x1F244, 'M', u'〔点〕'), + (0x1F245, 'M', u'〔打〕'), + (0x1F246, 'M', u'〔盗〕'), + (0x1F247, 'M', u'〔勝〕'), + (0x1F248, 'M', u'〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', u'得'), + (0x1F251, 'M', u'可'), + (0x1F252, 'X'), + (0x1F260, 'V'), + (0x1F266, 'X'), + (0x1F300, 'V'), + (0x1F6D5, 'X'), + (0x1F6E0, 'V'), + (0x1F6ED, 'X'), + (0x1F6F0, 'V'), + (0x1F6FA, 'X'), + (0x1F700, 'V'), + (0x1F774, 'X'), + (0x1F780, 'V'), + (0x1F7D9, 'X'), + (0x1F800, 'V'), + (0x1F80C, 'X'), + (0x1F810, 'V'), + (0x1F848, 'X'), + (0x1F850, 'V'), + (0x1F85A, 'X'), + (0x1F860, 'V'), + (0x1F888, 'X'), + (0x1F890, 'V'), + (0x1F8AE, 'X'), + (0x1F900, 'V'), + (0x1F90C, 'X'), + (0x1F910, 'V'), + (0x1F93F, 'X'), + (0x1F940, 'V'), + (0x1F971, 'X'), + (0x1F973, 'V'), + (0x1F977, 'X'), + (0x1F97A, 'V'), + (0x1F97B, 'X'), + (0x1F97C, 'V'), + (0x1F9A3, 'X'), + (0x1F9B0, 'V'), + (0x1F9BA, 'X'), + (0x1F9C0, 'V'), + (0x1F9C3, 'X'), + (0x1F9D0, 'V'), + (0x1FA00, 'X'), + (0x1FA60, 'V'), + (0x1FA6E, 'X'), + (0x20000, 'V'), + (0x2A6D7, 'X'), + (0x2A700, 'V'), + (0x2B735, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2B820, 'V'), + (0x2CEA2, 'X'), + (0x2CEB0, 'V'), + (0x2EBE1, 'X'), + (0x2F800, 'M', u'丽'), + (0x2F801, 'M', u'丸'), + (0x2F802, 'M', u'乁'), + (0x2F803, 'M', u'𠄢'), + (0x2F804, 'M', u'你'), + (0x2F805, 'M', u'侮'), + (0x2F806, 'M', u'侻'), + (0x2F807, 'M', u'倂'), + (0x2F808, 'M', u'偺'), + (0x2F809, 'M', u'備'), + (0x2F80A, 'M', u'僧'), + (0x2F80B, 'M', u'像'), + (0x2F80C, 'M', u'㒞'), + (0x2F80D, 'M', u'𠘺'), + (0x2F80E, 'M', u'免'), + (0x2F80F, 'M', u'兔'), + (0x2F810, 'M', u'兤'), + (0x2F811, 'M', u'具'), + (0x2F812, 'M', u'𠔜'), + (0x2F813, 'M', u'㒹'), + (0x2F814, 'M', u'內'), + (0x2F815, 'M', u'再'), + (0x2F816, 'M', u'𠕋'), + (0x2F817, 'M', u'冗'), + (0x2F818, 'M', u'冤'), + (0x2F819, 'M', u'仌'), + (0x2F81A, 'M', u'冬'), + (0x2F81B, 'M', u'况'), + (0x2F81C, 'M', u'𩇟'), + (0x2F81D, 'M', u'凵'), + (0x2F81E, 'M', u'刃'), + (0x2F81F, 'M', u'㓟'), + (0x2F820, 'M', u'刻'), + (0x2F821, 'M', u'剆'), + ] + +def _seg_73(): + return [ + (0x2F822, 'M', u'割'), + (0x2F823, 'M', u'剷'), + (0x2F824, 'M', u'㔕'), + (0x2F825, 'M', u'勇'), + (0x2F826, 'M', u'勉'), + (0x2F827, 'M', u'勤'), + (0x2F828, 'M', u'勺'), + (0x2F829, 'M', u'包'), + (0x2F82A, 'M', u'匆'), + (0x2F82B, 'M', u'北'), + (0x2F82C, 'M', u'卉'), + (0x2F82D, 'M', u'卑'), + (0x2F82E, 'M', u'博'), + (0x2F82F, 'M', u'即'), + (0x2F830, 'M', u'卽'), + (0x2F831, 'M', u'卿'), + (0x2F834, 'M', u'𠨬'), + (0x2F835, 'M', u'灰'), + (0x2F836, 'M', u'及'), + (0x2F837, 'M', u'叟'), + (0x2F838, 'M', u'𠭣'), + (0x2F839, 'M', u'叫'), + (0x2F83A, 'M', u'叱'), + (0x2F83B, 'M', u'吆'), + (0x2F83C, 'M', u'咞'), + (0x2F83D, 'M', u'吸'), + (0x2F83E, 'M', u'呈'), + (0x2F83F, 'M', u'周'), + (0x2F840, 'M', u'咢'), + (0x2F841, 'M', u'哶'), + (0x2F842, 'M', u'唐'), + (0x2F843, 'M', u'啓'), + (0x2F844, 'M', u'啣'), + (0x2F845, 'M', u'善'), + (0x2F847, 'M', u'喙'), + (0x2F848, 'M', u'喫'), + (0x2F849, 'M', u'喳'), + (0x2F84A, 'M', u'嗂'), + 
(0x2F84B, 'M', u'圖'), + (0x2F84C, 'M', u'嘆'), + (0x2F84D, 'M', u'圗'), + (0x2F84E, 'M', u'噑'), + (0x2F84F, 'M', u'噴'), + (0x2F850, 'M', u'切'), + (0x2F851, 'M', u'壮'), + (0x2F852, 'M', u'城'), + (0x2F853, 'M', u'埴'), + (0x2F854, 'M', u'堍'), + (0x2F855, 'M', u'型'), + (0x2F856, 'M', u'堲'), + (0x2F857, 'M', u'報'), + (0x2F858, 'M', u'墬'), + (0x2F859, 'M', u'𡓤'), + (0x2F85A, 'M', u'売'), + (0x2F85B, 'M', u'壷'), + (0x2F85C, 'M', u'夆'), + (0x2F85D, 'M', u'多'), + (0x2F85E, 'M', u'夢'), + (0x2F85F, 'M', u'奢'), + (0x2F860, 'M', u'𡚨'), + (0x2F861, 'M', u'𡛪'), + (0x2F862, 'M', u'姬'), + (0x2F863, 'M', u'娛'), + (0x2F864, 'M', u'娧'), + (0x2F865, 'M', u'姘'), + (0x2F866, 'M', u'婦'), + (0x2F867, 'M', u'㛮'), + (0x2F868, 'X'), + (0x2F869, 'M', u'嬈'), + (0x2F86A, 'M', u'嬾'), + (0x2F86C, 'M', u'𡧈'), + (0x2F86D, 'M', u'寃'), + (0x2F86E, 'M', u'寘'), + (0x2F86F, 'M', u'寧'), + (0x2F870, 'M', u'寳'), + (0x2F871, 'M', u'𡬘'), + (0x2F872, 'M', u'寿'), + (0x2F873, 'M', u'将'), + (0x2F874, 'X'), + (0x2F875, 'M', u'尢'), + (0x2F876, 'M', u'㞁'), + (0x2F877, 'M', u'屠'), + (0x2F878, 'M', u'屮'), + (0x2F879, 'M', u'峀'), + (0x2F87A, 'M', u'岍'), + (0x2F87B, 'M', u'𡷤'), + (0x2F87C, 'M', u'嵃'), + (0x2F87D, 'M', u'𡷦'), + (0x2F87E, 'M', u'嵮'), + (0x2F87F, 'M', u'嵫'), + (0x2F880, 'M', u'嵼'), + (0x2F881, 'M', u'巡'), + (0x2F882, 'M', u'巢'), + (0x2F883, 'M', u'㠯'), + (0x2F884, 'M', u'巽'), + (0x2F885, 'M', u'帨'), + (0x2F886, 'M', u'帽'), + (0x2F887, 'M', u'幩'), + (0x2F888, 'M', u'㡢'), + (0x2F889, 'M', u'𢆃'), + ] + +def _seg_74(): + return [ + (0x2F88A, 'M', u'㡼'), + (0x2F88B, 'M', u'庰'), + (0x2F88C, 'M', u'庳'), + (0x2F88D, 'M', u'庶'), + (0x2F88E, 'M', u'廊'), + (0x2F88F, 'M', u'𪎒'), + (0x2F890, 'M', u'廾'), + (0x2F891, 'M', u'𢌱'), + (0x2F893, 'M', u'舁'), + (0x2F894, 'M', u'弢'), + (0x2F896, 'M', u'㣇'), + (0x2F897, 'M', u'𣊸'), + (0x2F898, 'M', u'𦇚'), + (0x2F899, 'M', u'形'), + (0x2F89A, 'M', u'彫'), + (0x2F89B, 'M', u'㣣'), + (0x2F89C, 'M', u'徚'), + (0x2F89D, 'M', u'忍'), + (0x2F89E, 'M', u'志'), + (0x2F89F, 'M', u'忹'), + (0x2F8A0, 'M', u'悁'), + (0x2F8A1, 'M', u'㤺'), + (0x2F8A2, 'M', u'㤜'), + (0x2F8A3, 'M', u'悔'), + (0x2F8A4, 'M', u'𢛔'), + (0x2F8A5, 'M', u'惇'), + (0x2F8A6, 'M', u'慈'), + (0x2F8A7, 'M', u'慌'), + (0x2F8A8, 'M', u'慎'), + (0x2F8A9, 'M', u'慌'), + (0x2F8AA, 'M', u'慺'), + (0x2F8AB, 'M', u'憎'), + (0x2F8AC, 'M', u'憲'), + (0x2F8AD, 'M', u'憤'), + (0x2F8AE, 'M', u'憯'), + (0x2F8AF, 'M', u'懞'), + (0x2F8B0, 'M', u'懲'), + (0x2F8B1, 'M', u'懶'), + (0x2F8B2, 'M', u'成'), + (0x2F8B3, 'M', u'戛'), + (0x2F8B4, 'M', u'扝'), + (0x2F8B5, 'M', u'抱'), + (0x2F8B6, 'M', u'拔'), + (0x2F8B7, 'M', u'捐'), + (0x2F8B8, 'M', u'𢬌'), + (0x2F8B9, 'M', u'挽'), + (0x2F8BA, 'M', u'拼'), + (0x2F8BB, 'M', u'捨'), + (0x2F8BC, 'M', u'掃'), + (0x2F8BD, 'M', u'揤'), + (0x2F8BE, 'M', u'𢯱'), + (0x2F8BF, 'M', u'搢'), + (0x2F8C0, 'M', u'揅'), + (0x2F8C1, 'M', u'掩'), + (0x2F8C2, 'M', u'㨮'), + (0x2F8C3, 'M', u'摩'), + (0x2F8C4, 'M', u'摾'), + (0x2F8C5, 'M', u'撝'), + (0x2F8C6, 'M', u'摷'), + (0x2F8C7, 'M', u'㩬'), + (0x2F8C8, 'M', u'敏'), + (0x2F8C9, 'M', u'敬'), + (0x2F8CA, 'M', u'𣀊'), + (0x2F8CB, 'M', u'旣'), + (0x2F8CC, 'M', u'書'), + (0x2F8CD, 'M', u'晉'), + (0x2F8CE, 'M', u'㬙'), + (0x2F8CF, 'M', u'暑'), + (0x2F8D0, 'M', u'㬈'), + (0x2F8D1, 'M', u'㫤'), + (0x2F8D2, 'M', u'冒'), + (0x2F8D3, 'M', u'冕'), + (0x2F8D4, 'M', u'最'), + (0x2F8D5, 'M', u'暜'), + (0x2F8D6, 'M', u'肭'), + (0x2F8D7, 'M', u'䏙'), + (0x2F8D8, 'M', u'朗'), + (0x2F8D9, 'M', u'望'), + (0x2F8DA, 'M', u'朡'), + (0x2F8DB, 'M', u'杞'), + (0x2F8DC, 'M', u'杓'), + (0x2F8DD, 'M', u'𣏃'), + (0x2F8DE, 'M', u'㭉'), + (0x2F8DF, 'M', u'柺'), + (0x2F8E0, 'M', u'枅'), + 
(0x2F8E1, 'M', u'桒'), + (0x2F8E2, 'M', u'梅'), + (0x2F8E3, 'M', u'𣑭'), + (0x2F8E4, 'M', u'梎'), + (0x2F8E5, 'M', u'栟'), + (0x2F8E6, 'M', u'椔'), + (0x2F8E7, 'M', u'㮝'), + (0x2F8E8, 'M', u'楂'), + (0x2F8E9, 'M', u'榣'), + (0x2F8EA, 'M', u'槪'), + (0x2F8EB, 'M', u'檨'), + (0x2F8EC, 'M', u'𣚣'), + (0x2F8ED, 'M', u'櫛'), + (0x2F8EE, 'M', u'㰘'), + (0x2F8EF, 'M', u'次'), + ] + +def _seg_75(): + return [ + (0x2F8F0, 'M', u'𣢧'), + (0x2F8F1, 'M', u'歔'), + (0x2F8F2, 'M', u'㱎'), + (0x2F8F3, 'M', u'歲'), + (0x2F8F4, 'M', u'殟'), + (0x2F8F5, 'M', u'殺'), + (0x2F8F6, 'M', u'殻'), + (0x2F8F7, 'M', u'𣪍'), + (0x2F8F8, 'M', u'𡴋'), + (0x2F8F9, 'M', u'𣫺'), + (0x2F8FA, 'M', u'汎'), + (0x2F8FB, 'M', u'𣲼'), + (0x2F8FC, 'M', u'沿'), + (0x2F8FD, 'M', u'泍'), + (0x2F8FE, 'M', u'汧'), + (0x2F8FF, 'M', u'洖'), + (0x2F900, 'M', u'派'), + (0x2F901, 'M', u'海'), + (0x2F902, 'M', u'流'), + (0x2F903, 'M', u'浩'), + (0x2F904, 'M', u'浸'), + (0x2F905, 'M', u'涅'), + (0x2F906, 'M', u'𣴞'), + (0x2F907, 'M', u'洴'), + (0x2F908, 'M', u'港'), + (0x2F909, 'M', u'湮'), + (0x2F90A, 'M', u'㴳'), + (0x2F90B, 'M', u'滋'), + (0x2F90C, 'M', u'滇'), + (0x2F90D, 'M', u'𣻑'), + (0x2F90E, 'M', u'淹'), + (0x2F90F, 'M', u'潮'), + (0x2F910, 'M', u'𣽞'), + (0x2F911, 'M', u'𣾎'), + (0x2F912, 'M', u'濆'), + (0x2F913, 'M', u'瀹'), + (0x2F914, 'M', u'瀞'), + (0x2F915, 'M', u'瀛'), + (0x2F916, 'M', u'㶖'), + (0x2F917, 'M', u'灊'), + (0x2F918, 'M', u'災'), + (0x2F919, 'M', u'灷'), + (0x2F91A, 'M', u'炭'), + (0x2F91B, 'M', u'𠔥'), + (0x2F91C, 'M', u'煅'), + (0x2F91D, 'M', u'𤉣'), + (0x2F91E, 'M', u'熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', u'爨'), + (0x2F921, 'M', u'爵'), + (0x2F922, 'M', u'牐'), + (0x2F923, 'M', u'𤘈'), + (0x2F924, 'M', u'犀'), + (0x2F925, 'M', u'犕'), + (0x2F926, 'M', u'𤜵'), + (0x2F927, 'M', u'𤠔'), + (0x2F928, 'M', u'獺'), + (0x2F929, 'M', u'王'), + (0x2F92A, 'M', u'㺬'), + (0x2F92B, 'M', u'玥'), + (0x2F92C, 'M', u'㺸'), + (0x2F92E, 'M', u'瑇'), + (0x2F92F, 'M', u'瑜'), + (0x2F930, 'M', u'瑱'), + (0x2F931, 'M', u'璅'), + (0x2F932, 'M', u'瓊'), + (0x2F933, 'M', u'㼛'), + (0x2F934, 'M', u'甤'), + (0x2F935, 'M', u'𤰶'), + (0x2F936, 'M', u'甾'), + (0x2F937, 'M', u'𤲒'), + (0x2F938, 'M', u'異'), + (0x2F939, 'M', u'𢆟'), + (0x2F93A, 'M', u'瘐'), + (0x2F93B, 'M', u'𤾡'), + (0x2F93C, 'M', u'𤾸'), + (0x2F93D, 'M', u'𥁄'), + (0x2F93E, 'M', u'㿼'), + (0x2F93F, 'M', u'䀈'), + (0x2F940, 'M', u'直'), + (0x2F941, 'M', u'𥃳'), + (0x2F942, 'M', u'𥃲'), + (0x2F943, 'M', u'𥄙'), + (0x2F944, 'M', u'𥄳'), + (0x2F945, 'M', u'眞'), + (0x2F946, 'M', u'真'), + (0x2F948, 'M', u'睊'), + (0x2F949, 'M', u'䀹'), + (0x2F94A, 'M', u'瞋'), + (0x2F94B, 'M', u'䁆'), + (0x2F94C, 'M', u'䂖'), + (0x2F94D, 'M', u'𥐝'), + (0x2F94E, 'M', u'硎'), + (0x2F94F, 'M', u'碌'), + (0x2F950, 'M', u'磌'), + (0x2F951, 'M', u'䃣'), + (0x2F952, 'M', u'𥘦'), + (0x2F953, 'M', u'祖'), + (0x2F954, 'M', u'𥚚'), + (0x2F955, 'M', u'𥛅'), + ] + +def _seg_76(): + return [ + (0x2F956, 'M', u'福'), + (0x2F957, 'M', u'秫'), + (0x2F958, 'M', u'䄯'), + (0x2F959, 'M', u'穀'), + (0x2F95A, 'M', u'穊'), + (0x2F95B, 'M', u'穏'), + (0x2F95C, 'M', u'𥥼'), + (0x2F95D, 'M', u'𥪧'), + (0x2F95F, 'X'), + (0x2F960, 'M', u'䈂'), + (0x2F961, 'M', u'𥮫'), + (0x2F962, 'M', u'篆'), + (0x2F963, 'M', u'築'), + (0x2F964, 'M', u'䈧'), + (0x2F965, 'M', u'𥲀'), + (0x2F966, 'M', u'糒'), + (0x2F967, 'M', u'䊠'), + (0x2F968, 'M', u'糨'), + (0x2F969, 'M', u'糣'), + (0x2F96A, 'M', u'紀'), + (0x2F96B, 'M', u'𥾆'), + (0x2F96C, 'M', u'絣'), + (0x2F96D, 'M', u'䌁'), + (0x2F96E, 'M', u'緇'), + (0x2F96F, 'M', u'縂'), + (0x2F970, 'M', u'繅'), + (0x2F971, 'M', u'䌴'), + (0x2F972, 'M', u'𦈨'), + (0x2F973, 'M', u'𦉇'), + (0x2F974, 'M', u'䍙'), + (0x2F975, 'M', 
u'𦋙'), + (0x2F976, 'M', u'罺'), + (0x2F977, 'M', u'𦌾'), + (0x2F978, 'M', u'羕'), + (0x2F979, 'M', u'翺'), + (0x2F97A, 'M', u'者'), + (0x2F97B, 'M', u'𦓚'), + (0x2F97C, 'M', u'𦔣'), + (0x2F97D, 'M', u'聠'), + (0x2F97E, 'M', u'𦖨'), + (0x2F97F, 'M', u'聰'), + (0x2F980, 'M', u'𣍟'), + (0x2F981, 'M', u'䏕'), + (0x2F982, 'M', u'育'), + (0x2F983, 'M', u'脃'), + (0x2F984, 'M', u'䐋'), + (0x2F985, 'M', u'脾'), + (0x2F986, 'M', u'媵'), + (0x2F987, 'M', u'𦞧'), + (0x2F988, 'M', u'𦞵'), + (0x2F989, 'M', u'𣎓'), + (0x2F98A, 'M', u'𣎜'), + (0x2F98B, 'M', u'舁'), + (0x2F98C, 'M', u'舄'), + (0x2F98D, 'M', u'辞'), + (0x2F98E, 'M', u'䑫'), + (0x2F98F, 'M', u'芑'), + (0x2F990, 'M', u'芋'), + (0x2F991, 'M', u'芝'), + (0x2F992, 'M', u'劳'), + (0x2F993, 'M', u'花'), + (0x2F994, 'M', u'芳'), + (0x2F995, 'M', u'芽'), + (0x2F996, 'M', u'苦'), + (0x2F997, 'M', u'𦬼'), + (0x2F998, 'M', u'若'), + (0x2F999, 'M', u'茝'), + (0x2F99A, 'M', u'荣'), + (0x2F99B, 'M', u'莭'), + (0x2F99C, 'M', u'茣'), + (0x2F99D, 'M', u'莽'), + (0x2F99E, 'M', u'菧'), + (0x2F99F, 'M', u'著'), + (0x2F9A0, 'M', u'荓'), + (0x2F9A1, 'M', u'菊'), + (0x2F9A2, 'M', u'菌'), + (0x2F9A3, 'M', u'菜'), + (0x2F9A4, 'M', u'𦰶'), + (0x2F9A5, 'M', u'𦵫'), + (0x2F9A6, 'M', u'𦳕'), + (0x2F9A7, 'M', u'䔫'), + (0x2F9A8, 'M', u'蓱'), + (0x2F9A9, 'M', u'蓳'), + (0x2F9AA, 'M', u'蔖'), + (0x2F9AB, 'M', u'𧏊'), + (0x2F9AC, 'M', u'蕤'), + (0x2F9AD, 'M', u'𦼬'), + (0x2F9AE, 'M', u'䕝'), + (0x2F9AF, 'M', u'䕡'), + (0x2F9B0, 'M', u'𦾱'), + (0x2F9B1, 'M', u'𧃒'), + (0x2F9B2, 'M', u'䕫'), + (0x2F9B3, 'M', u'虐'), + (0x2F9B4, 'M', u'虜'), + (0x2F9B5, 'M', u'虧'), + (0x2F9B6, 'M', u'虩'), + (0x2F9B7, 'M', u'蚩'), + (0x2F9B8, 'M', u'蚈'), + (0x2F9B9, 'M', u'蜎'), + (0x2F9BA, 'M', u'蛢'), + ] + +def _seg_77(): + return [ + (0x2F9BB, 'M', u'蝹'), + (0x2F9BC, 'M', u'蜨'), + (0x2F9BD, 'M', u'蝫'), + (0x2F9BE, 'M', u'螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', u'蟡'), + (0x2F9C1, 'M', u'蠁'), + (0x2F9C2, 'M', u'䗹'), + (0x2F9C3, 'M', u'衠'), + (0x2F9C4, 'M', u'衣'), + (0x2F9C5, 'M', u'𧙧'), + (0x2F9C6, 'M', u'裗'), + (0x2F9C7, 'M', u'裞'), + (0x2F9C8, 'M', u'䘵'), + (0x2F9C9, 'M', u'裺'), + (0x2F9CA, 'M', u'㒻'), + (0x2F9CB, 'M', u'𧢮'), + (0x2F9CC, 'M', u'𧥦'), + (0x2F9CD, 'M', u'䚾'), + (0x2F9CE, 'M', u'䛇'), + (0x2F9CF, 'M', u'誠'), + (0x2F9D0, 'M', u'諭'), + (0x2F9D1, 'M', u'變'), + (0x2F9D2, 'M', u'豕'), + (0x2F9D3, 'M', u'𧲨'), + (0x2F9D4, 'M', u'貫'), + (0x2F9D5, 'M', u'賁'), + (0x2F9D6, 'M', u'贛'), + (0x2F9D7, 'M', u'起'), + (0x2F9D8, 'M', u'𧼯'), + (0x2F9D9, 'M', u'𠠄'), + (0x2F9DA, 'M', u'跋'), + (0x2F9DB, 'M', u'趼'), + (0x2F9DC, 'M', u'跰'), + (0x2F9DD, 'M', u'𠣞'), + (0x2F9DE, 'M', u'軔'), + (0x2F9DF, 'M', u'輸'), + (0x2F9E0, 'M', u'𨗒'), + (0x2F9E1, 'M', u'𨗭'), + (0x2F9E2, 'M', u'邔'), + (0x2F9E3, 'M', u'郱'), + (0x2F9E4, 'M', u'鄑'), + (0x2F9E5, 'M', u'𨜮'), + (0x2F9E6, 'M', u'鄛'), + (0x2F9E7, 'M', u'鈸'), + (0x2F9E8, 'M', u'鋗'), + (0x2F9E9, 'M', u'鋘'), + (0x2F9EA, 'M', u'鉼'), + (0x2F9EB, 'M', u'鏹'), + (0x2F9EC, 'M', u'鐕'), + (0x2F9ED, 'M', u'𨯺'), + (0x2F9EE, 'M', u'開'), + (0x2F9EF, 'M', u'䦕'), + (0x2F9F0, 'M', u'閷'), + (0x2F9F1, 'M', u'𨵷'), + (0x2F9F2, 'M', u'䧦'), + (0x2F9F3, 'M', u'雃'), + (0x2F9F4, 'M', u'嶲'), + (0x2F9F5, 'M', u'霣'), + (0x2F9F6, 'M', u'𩅅'), + (0x2F9F7, 'M', u'𩈚'), + (0x2F9F8, 'M', u'䩮'), + (0x2F9F9, 'M', u'䩶'), + (0x2F9FA, 'M', u'韠'), + (0x2F9FB, 'M', u'𩐊'), + (0x2F9FC, 'M', u'䪲'), + (0x2F9FD, 'M', u'𩒖'), + (0x2F9FE, 'M', u'頋'), + (0x2FA00, 'M', u'頩'), + (0x2FA01, 'M', u'𩖶'), + (0x2FA02, 'M', u'飢'), + (0x2FA03, 'M', u'䬳'), + (0x2FA04, 'M', u'餩'), + (0x2FA05, 'M', u'馧'), + (0x2FA06, 'M', u'駂'), + (0x2FA07, 'M', u'駾'), + (0x2FA08, 'M', u'䯎'), + (0x2FA09, 'M', 
u'𩬰'), + (0x2FA0A, 'M', u'鬒'), + (0x2FA0B, 'M', u'鱀'), + (0x2FA0C, 'M', u'鳽'), + (0x2FA0D, 'M', u'䳎'), + (0x2FA0E, 'M', u'䳭'), + (0x2FA0F, 'M', u'鵧'), + (0x2FA10, 'M', u'𪃎'), + (0x2FA11, 'M', u'䳸'), + (0x2FA12, 'M', u'𪄅'), + (0x2FA13, 'M', u'𪈎'), + (0x2FA14, 'M', u'𪊑'), + (0x2FA15, 'M', u'麻'), + (0x2FA16, 'M', u'䵖'), + (0x2FA17, 'M', u'黹'), + (0x2FA18, 'M', u'黾'), + (0x2FA19, 'M', u'鼅'), + (0x2FA1A, 'M', u'鼏'), + (0x2FA1B, 'M', u'鼖'), + (0x2FA1C, 'M', u'鼻'), + (0x2FA1D, 'M', u'𪘀'), + (0x2FA1E, 'X'), + (0xE0100, 'I'), + ] + +def _seg_78(): + return [ + (0xE01F0, 'X'), + ] + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() +) diff --git a/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/INSTALLER b/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/LICENSE.rst b/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/LICENSE.rst new file mode 100644 index 0000000..ef9c194 --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/LICENSE.rst @@ -0,0 +1,47 @@ +`BSD 3-Clause `_ + +Copyright © 2011 by the Pallets team. + +Some rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +- Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +- Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +We kindly ask you to use these themes in an unmodified manner only with +Pallets and Pallets-related projects, not for unrelated projects. If you +like the visual style and want to use it for your own projects, please +consider making some larger changes to the themes (such as changing font +faces, sizes, colors or margins). 
+ +THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + +---- + +The initial implementation of itsdangerous was inspired by Django's +signing module. + +Copyright © Django Software Foundation and individual contributors. +All rights reserved. diff --git a/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/METADATA b/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/METADATA new file mode 100644 index 0000000..7389a4d --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/METADATA @@ -0,0 +1,98 @@ +Metadata-Version: 2.1 +Name: itsdangerous +Version: 1.1.0 +Summary: Various helpers to pass data to untrusted environments and back. +Home-page: https://palletsprojects.com/p/itsdangerous/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets Team +Maintainer-email: contact@palletsprojects.com +License: BSD +Project-URL: Documentation, https://itsdangerous.palletsprojects.com/ +Project-URL: Code, https://github.com/pallets/itsdangerous +Project-URL: Issue tracker, https://github.com/pallets/itsdangerous/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* + +itsdangerous +============ + +... so better sign this + +Various helpers to pass data to untrusted environments and to get it +back safe and sound. Data is cryptographically signed to ensure that a +token has not been tampered with. + +It's possible to customize how data is serialized. Data is compressed as +needed. A timestamp can be added and verified automatically while +loading a token. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U itsdangerous + +.. _pip: https://pip.pypa.io/en/stable/quickstart/ + + +A Simple Example +---------------- + +Here's how you could generate a token for transmitting a user's id and +name between web requests. + +.. 
code-block:: python + + from itsdangerous import URLSafeSerializer + auth_s = URLSafeSerializer("secret key", "auth") + token = auth_s.dumps({"id": 5, "name": "itsdangerous"}) + + print(token) + # eyJpZCI6NSwibmFtZSI6Iml0c2Rhbmdlcm91cyJ9.6YP6T0BaO67XP--9UzTrmurXSmg + + data = auth_s.loads(token) + print(data["name"]) + # itsdangerous + + +Donate +------ + +The Pallets organization develops and supports itsdangerous and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +`please donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +* Website: https://palletsprojects.com/p/itsdangerous/ +* Documentation: https://itsdangerous.palletsprojects.com/ +* License: `BSD `_ +* Releases: https://pypi.org/project/itsdangerous/ +* Code: https://github.com/pallets/itsdangerous +* Issue tracker: https://github.com/pallets/itsdangerous/issues +* Test status: https://travis-ci.org/pallets/itsdangerous +* Test coverage: https://codecov.io/gh/pallets/itsdangerous + + diff --git a/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/RECORD b/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/RECORD new file mode 100644 index 0000000..79baf65 --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/RECORD @@ -0,0 +1,26 @@ +itsdangerous/__init__.py,sha256=Dr-SkfFdOyiR_WjiqIXnlFpYRMW0XvPBNV5muzE5N_A,708 +itsdangerous/_compat.py,sha256=oAAMcQAjwQXQpIbuHT3o-aL56ztm_7Fe-4lD7IteF6A,1133 +itsdangerous/_json.py,sha256=W7BLL4RPnSOjNdo2gfKT3BeARMCIikY6O75rwWV0XoE,431 +itsdangerous/encoding.py,sha256=KhY85PsH3bGHe5JANN4LMZ_3b0IwUWRRnnw1wvLlaIg,1224 +itsdangerous/exc.py,sha256=KFxg7K2XMliMQAxL4jkRNgE8e73z2jcRaLrzwqVObnI,2959 +itsdangerous/jws.py,sha256=6Lh9W-Lu8D9s7bRazs0Zb35eyAZm3pzLeZqHmRELeII,7470 +itsdangerous/serializer.py,sha256=bT-dfjKec9zcKa8Qo8n7mHW_8M-XCTPMOFq1TQI_Fv4,8653 +itsdangerous/signer.py,sha256=OOZbK8XomBjQfOFEul8osesn7fc80MXB0L1r7E86_GQ,6345 +itsdangerous/timed.py,sha256=on5Q5lX7LT_LaETOhzF1ZmrRbia8P98263R8FiRyM6Y,5635 +itsdangerous/url_safe.py,sha256=xnFTaukIPmW6Qwn6uNQLgzdau8RuAKnp5N7ukuXykj0,2275 +itsdangerous-1.1.0.dist-info/LICENSE.rst,sha256=_rKL-jSNgWsOfbrt3xhJnufoAHxngT241qs3xl4EbNQ,2120 +itsdangerous-1.1.0.dist-info/METADATA,sha256=yyKjL2WOg_WybH2Yt-7NIvGpV3B93IsMc2HbToWc7Sk,3062 +itsdangerous-1.1.0.dist-info/WHEEL,sha256=CihQvCnsGZQBGAHLEUMf0IdA4fRduS_NBUTMgCTtvPM,110 +itsdangerous-1.1.0.dist-info/top_level.txt,sha256=gKN1OKLk81i7fbWWildJA88EQ9NhnGMSvZqhfz9ICjk,13 +itsdangerous-1.1.0.dist-info/RECORD,, +itsdangerous-1.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +itsdangerous/__pycache__/jws.cpython-36.pyc,, +itsdangerous/__pycache__/_compat.cpython-36.pyc,, +itsdangerous/__pycache__/url_safe.cpython-36.pyc,, +itsdangerous/__pycache__/encoding.cpython-36.pyc,, +itsdangerous/__pycache__/_json.cpython-36.pyc,, +itsdangerous/__pycache__/timed.cpython-36.pyc,, +itsdangerous/__pycache__/serializer.cpython-36.pyc,, +itsdangerous/__pycache__/__init__.cpython-36.pyc,, +itsdangerous/__pycache__/exc.cpython-36.pyc,, +itsdangerous/__pycache__/signer.cpython-36.pyc,, diff --git a/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/WHEEL b/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/WHEEL new file mode 100644 index 0000000..dea0e20 --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: 
bdist_wheel (0.32.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/top_level.txt b/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/top_level.txt new file mode 100644 index 0000000..e163955 --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ +itsdangerous diff --git a/py3/lib/python3.6/site-packages/itsdangerous/__init__.py b/py3/lib/python3.6/site-packages/itsdangerous/__init__.py new file mode 100644 index 0000000..0fcd8c1 --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous/__init__.py @@ -0,0 +1,22 @@ +from ._json import json +from .encoding import base64_decode +from .encoding import base64_encode +from .encoding import want_bytes +from .exc import BadData +from .exc import BadHeader +from .exc import BadPayload +from .exc import BadSignature +from .exc import BadTimeSignature +from .exc import SignatureExpired +from .jws import JSONWebSignatureSerializer +from .jws import TimedJSONWebSignatureSerializer +from .serializer import Serializer +from .signer import HMACAlgorithm +from .signer import NoneAlgorithm +from .signer import Signer +from .timed import TimedSerializer +from .timed import TimestampSigner +from .url_safe import URLSafeSerializer +from .url_safe import URLSafeTimedSerializer + +__version__ = "1.1.0" diff --git a/py3/lib/python3.6/site-packages/itsdangerous/_compat.py b/py3/lib/python3.6/site-packages/itsdangerous/_compat.py new file mode 100644 index 0000000..2291bce --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous/_compat.py @@ -0,0 +1,46 @@ +import decimal +import hmac +import numbers +import sys + +PY2 = sys.version_info[0] == 2 + +if PY2: + from itertools import izip + + text_type = unicode # noqa: 821 +else: + izip = zip + text_type = str + +number_types = (numbers.Real, decimal.Decimal) + + +def _constant_time_compare(val1, val2): + """Return ``True`` if the two strings are equal, ``False`` + otherwise. + + The time taken is independent of the number of characters that + match. Do not use this function for anything else than comparision + with known length targets. + + This is should be implemented in C in order to get it completely + right. + + This is an alias of :func:`hmac.compare_digest` on Python>=2.7,3.3. + """ + len_eq = len(val1) == len(val2) + if len_eq: + result = 0 + left = val1 + else: + result = 1 + left = val2 + for x, y in izip(bytearray(left), bytearray(val2)): + result |= x ^ y + return result == 0 + + +# Starting with 2.7/3.3 the standard library has a c-implementation for +# constant time string compares. 
+constant_time_compare = getattr(hmac, "compare_digest", _constant_time_compare) diff --git a/py3/lib/python3.6/site-packages/itsdangerous/_json.py b/py3/lib/python3.6/site-packages/itsdangerous/_json.py new file mode 100644 index 0000000..426b36e --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous/_json.py @@ -0,0 +1,18 @@ +try: + import simplejson as json +except ImportError: + import json + + +class _CompactJSON(object): + """Wrapper around json module that strips whitespace.""" + + @staticmethod + def loads(payload): + return json.loads(payload) + + @staticmethod + def dumps(obj, **kwargs): + kwargs.setdefault("ensure_ascii", False) + kwargs.setdefault("separators", (",", ":")) + return json.dumps(obj, **kwargs) diff --git a/py3/lib/python3.6/site-packages/itsdangerous/encoding.py b/py3/lib/python3.6/site-packages/itsdangerous/encoding.py new file mode 100644 index 0000000..1e28969 --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous/encoding.py @@ -0,0 +1,49 @@ +import base64 +import string +import struct + +from ._compat import text_type +from .exc import BadData + + +def want_bytes(s, encoding="utf-8", errors="strict"): + if isinstance(s, text_type): + s = s.encode(encoding, errors) + return s + + +def base64_encode(string): + """Base64 encode a string of bytes or text. The resulting bytes are + safe to use in URLs. + """ + string = want_bytes(string) + return base64.urlsafe_b64encode(string).rstrip(b"=") + + +def base64_decode(string): + """Base64 decode a URL-safe string of bytes or text. The result is + bytes. + """ + string = want_bytes(string, encoding="ascii", errors="ignore") + string += b"=" * (-len(string) % 4) + + try: + return base64.urlsafe_b64decode(string) + except (TypeError, ValueError): + raise BadData("Invalid base64-encoded data") + + +# The alphabet used by base64.urlsafe_* +_base64_alphabet = (string.ascii_letters + string.digits + "-_=").encode("ascii") + +_int64_struct = struct.Struct(">Q") +_int_to_bytes = _int64_struct.pack +_bytes_to_int = _int64_struct.unpack + + +def int_to_bytes(num): + return _int_to_bytes(num).lstrip(b"\x00") + + +def bytes_to_int(bytestr): + return _bytes_to_int(bytestr.rjust(8, b"\x00"))[0] diff --git a/py3/lib/python3.6/site-packages/itsdangerous/exc.py b/py3/lib/python3.6/site-packages/itsdangerous/exc.py new file mode 100644 index 0000000..287d691 --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous/exc.py @@ -0,0 +1,98 @@ +from ._compat import PY2 +from ._compat import text_type + + +class BadData(Exception): + """Raised if bad data of any sort was encountered. This is the base + for all exceptions that itsdangerous defines. + + .. versionadded:: 0.15 + """ + + message = None + + def __init__(self, message): + super(BadData, self).__init__(self, message) + self.message = message + + def __str__(self): + return text_type(self.message) + + if PY2: + __unicode__ = __str__ + + def __str__(self): + return self.__unicode__().encode("utf-8") + + +class BadSignature(BadData): + """Raised if a signature does not match.""" + + def __init__(self, message, payload=None): + BadData.__init__(self, message) + + #: The payload that failed the signature test. In some + #: situations you might still want to inspect this, even if + #: you know it was tampered with. + #: + #: .. versionadded:: 0.14 + self.payload = payload + + +class BadTimeSignature(BadSignature): + """Raised if a time-based signature is invalid. This is a subclass + of :class:`BadSignature`. 
+ """ + + def __init__(self, message, payload=None, date_signed=None): + BadSignature.__init__(self, message, payload) + + #: If the signature expired this exposes the date of when the + #: signature was created. This can be helpful in order to + #: tell the user how long a link has been gone stale. + #: + #: .. versionadded:: 0.14 + self.date_signed = date_signed + + +class SignatureExpired(BadTimeSignature): + """Raised if a signature timestamp is older than ``max_age``. This + is a subclass of :exc:`BadTimeSignature`. + """ + + +class BadHeader(BadSignature): + """Raised if a signed header is invalid in some form. This only + happens for serializers that have a header that goes with the + signature. + + .. versionadded:: 0.24 + """ + + def __init__(self, message, payload=None, header=None, original_error=None): + BadSignature.__init__(self, message, payload) + + #: If the header is actually available but just malformed it + #: might be stored here. + self.header = header + + #: If available, the error that indicates why the payload was + #: not valid. This might be ``None``. + self.original_error = original_error + + +class BadPayload(BadData): + """Raised if a payload is invalid. This could happen if the payload + is loaded despite an invalid signature, or if there is a mismatch + between the serializer and deserializer. The original exception + that occurred during loading is stored on as :attr:`original_error`. + + .. versionadded:: 0.15 + """ + + def __init__(self, message, original_error=None): + BadData.__init__(self, message) + + #: If available, the error that indicates why the payload was + #: not valid. This might be ``None``. + self.original_error = original_error diff --git a/py3/lib/python3.6/site-packages/itsdangerous/jws.py b/py3/lib/python3.6/site-packages/itsdangerous/jws.py new file mode 100644 index 0000000..92e9ec8 --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous/jws.py @@ -0,0 +1,218 @@ +import hashlib +import time +from datetime import datetime + +from ._compat import number_types +from ._json import _CompactJSON +from ._json import json +from .encoding import base64_decode +from .encoding import base64_encode +from .encoding import want_bytes +from .exc import BadData +from .exc import BadHeader +from .exc import BadPayload +from .exc import BadSignature +from .exc import SignatureExpired +from .serializer import Serializer +from .signer import HMACAlgorithm +from .signer import NoneAlgorithm + + +class JSONWebSignatureSerializer(Serializer): + """This serializer implements JSON Web Signature (JWS) support. Only + supports the JWS Compact Serialization. 
+ """ + + jws_algorithms = { + "HS256": HMACAlgorithm(hashlib.sha256), + "HS384": HMACAlgorithm(hashlib.sha384), + "HS512": HMACAlgorithm(hashlib.sha512), + "none": NoneAlgorithm(), + } + + #: The default algorithm to use for signature generation + default_algorithm = "HS512" + + default_serializer = _CompactJSON + + def __init__( + self, + secret_key, + salt=None, + serializer=None, + serializer_kwargs=None, + signer=None, + signer_kwargs=None, + algorithm_name=None, + ): + Serializer.__init__( + self, + secret_key=secret_key, + salt=salt, + serializer=serializer, + serializer_kwargs=serializer_kwargs, + signer=signer, + signer_kwargs=signer_kwargs, + ) + if algorithm_name is None: + algorithm_name = self.default_algorithm + self.algorithm_name = algorithm_name + self.algorithm = self.make_algorithm(algorithm_name) + + def load_payload(self, payload, serializer=None, return_header=False): + payload = want_bytes(payload) + if b"." not in payload: + raise BadPayload('No "." found in value') + base64d_header, base64d_payload = payload.split(b".", 1) + try: + json_header = base64_decode(base64d_header) + except Exception as e: + raise BadHeader( + "Could not base64 decode the header because of an exception", + original_error=e, + ) + try: + json_payload = base64_decode(base64d_payload) + except Exception as e: + raise BadPayload( + "Could not base64 decode the payload because of an exception", + original_error=e, + ) + try: + header = Serializer.load_payload(self, json_header, serializer=json) + except BadData as e: + raise BadHeader( + "Could not unserialize header because it was malformed", + original_error=e, + ) + if not isinstance(header, dict): + raise BadHeader("Header payload is not a JSON object", header=header) + payload = Serializer.load_payload(self, json_payload, serializer=serializer) + if return_header: + return payload, header + return payload + + def dump_payload(self, header, obj): + base64d_header = base64_encode( + self.serializer.dumps(header, **self.serializer_kwargs) + ) + base64d_payload = base64_encode( + self.serializer.dumps(obj, **self.serializer_kwargs) + ) + return base64d_header + b"." + base64d_payload + + def make_algorithm(self, algorithm_name): + try: + return self.jws_algorithms[algorithm_name] + except KeyError: + raise NotImplementedError("Algorithm not supported") + + def make_signer(self, salt=None, algorithm=None): + if salt is None: + salt = self.salt + key_derivation = "none" if salt is None else None + if algorithm is None: + algorithm = self.algorithm + return self.signer( + self.secret_key, + salt=salt, + sep=".", + key_derivation=key_derivation, + algorithm=algorithm, + ) + + def make_header(self, header_fields): + header = header_fields.copy() if header_fields else {} + header["alg"] = self.algorithm_name + return header + + def dumps(self, obj, salt=None, header_fields=None): + """Like :meth:`.Serializer.dumps` but creates a JSON Web + Signature. It also allows for specifying additional fields to be + included in the JWS header. + """ + header = self.make_header(header_fields) + signer = self.make_signer(salt, self.algorithm) + return signer.sign(self.dump_payload(header, obj)) + + def loads(self, s, salt=None, return_header=False): + """Reverse of :meth:`dumps`. If requested via ``return_header`` + it will return a tuple of payload and header. 
+ """ + payload, header = self.load_payload( + self.make_signer(salt, self.algorithm).unsign(want_bytes(s)), + return_header=True, + ) + if header.get("alg") != self.algorithm_name: + raise BadHeader("Algorithm mismatch", header=header, payload=payload) + if return_header: + return payload, header + return payload + + def loads_unsafe(self, s, salt=None, return_header=False): + kwargs = {"return_header": return_header} + return self._loads_unsafe_impl(s, salt, kwargs, kwargs) + + +class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer): + """Works like the regular :class:`JSONWebSignatureSerializer` but + also records the time of the signing and can be used to expire + signatures. + + JWS currently does not specify this behavior but it mentions a + possible extension like this in the spec. Expiry date is encoded + into the header similar to what's specified in `draft-ietf-oauth + -json-web-token `_. + """ + + DEFAULT_EXPIRES_IN = 3600 + + def __init__(self, secret_key, expires_in=None, **kwargs): + JSONWebSignatureSerializer.__init__(self, secret_key, **kwargs) + if expires_in is None: + expires_in = self.DEFAULT_EXPIRES_IN + self.expires_in = expires_in + + def make_header(self, header_fields): + header = JSONWebSignatureSerializer.make_header(self, header_fields) + iat = self.now() + exp = iat + self.expires_in + header["iat"] = iat + header["exp"] = exp + return header + + def loads(self, s, salt=None, return_header=False): + payload, header = JSONWebSignatureSerializer.loads( + self, s, salt, return_header=True + ) + + if "exp" not in header: + raise BadSignature("Missing expiry date", payload=payload) + + int_date_error = BadHeader("Expiry date is not an IntDate", payload=payload) + try: + header["exp"] = int(header["exp"]) + except ValueError: + raise int_date_error + if header["exp"] < 0: + raise int_date_error + + if header["exp"] < self.now(): + raise SignatureExpired( + "Signature expired", + payload=payload, + date_signed=self.get_issue_date(header), + ) + + if return_header: + return payload, header + return payload + + def get_issue_date(self, header): + rv = header.get("iat") + if isinstance(rv, number_types): + return datetime.utcfromtimestamp(int(rv)) + + def now(self): + return int(time.time()) diff --git a/py3/lib/python3.6/site-packages/itsdangerous/serializer.py b/py3/lib/python3.6/site-packages/itsdangerous/serializer.py new file mode 100644 index 0000000..12c20f4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous/serializer.py @@ -0,0 +1,233 @@ +import hashlib + +from ._compat import text_type +from ._json import json +from .encoding import want_bytes +from .exc import BadPayload +from .exc import BadSignature +from .signer import Signer + + +def is_text_serializer(serializer): + """Checks whether a serializer generates text or binary.""" + return isinstance(serializer.dumps({}), text_type) + + +class Serializer(object): + """This class provides a serialization interface on top of the + signer. It provides a similar API to json/pickle and other modules + but is structured differently internally. If you want to change the + underlying implementation for parsing and loading you have to + override the :meth:`load_payload` and :meth:`dump_payload` + functions. + + This implementation uses simplejson if available for dumping and + loading and will fall back to the standard library's json module if + it's not available. + + You do not need to subclass this class in order to switch out or + customize the :class:`.Signer`. 
You can instead pass a different + class to the constructor as well as keyword arguments as a dict that + should be forwarded. + + .. code-block:: python + + s = Serializer(signer_kwargs={'key_derivation': 'hmac'}) + + You may want to upgrade the signing parameters without invalidating + existing signatures that are in use. Fallback signatures can be + given that will be tried if unsigning with the current signer fails. + + Fallback signers can be defined by providing a list of + ``fallback_signers``. Each item can be one of the following: a + signer class (which is instantiated with ``signer_kwargs``, + ``salt``, and ``secret_key``), a tuple + ``(signer_class, signer_kwargs)``, or a dict of ``signer_kwargs``. + + For example, this is a serializer that signs using SHA-512, but will + unsign using either SHA-512 or SHA1: + + .. code-block:: python + + s = Serializer( + signer_kwargs={"digest_method": hashlib.sha512}, + fallback_signers=[{"digest_method": hashlib.sha1}] + ) + + .. versionchanged:: 0.14: + The ``signer`` and ``signer_kwargs`` parameters were added to + the constructor. + + .. versionchanged:: 1.1.0: + Added support for ``fallback_signers`` and configured a default + SHA-512 fallback. This fallback is for users who used the yanked + 1.0.0 release which defaulted to SHA-512. + """ + + #: If a serializer module or class is not passed to the constructor + #: this one is picked up. This currently defaults to :mod:`json`. + default_serializer = json + + #: The default :class:`Signer` class that is being used by this + #: serializer. + #: + #: .. versionadded:: 0.14 + default_signer = Signer + + #: The default fallback signers. + default_fallback_signers = [{"digest_method": hashlib.sha512}] + + def __init__( + self, + secret_key, + salt=b"itsdangerous", + serializer=None, + serializer_kwargs=None, + signer=None, + signer_kwargs=None, + fallback_signers=None, + ): + self.secret_key = want_bytes(secret_key) + self.salt = want_bytes(salt) + if serializer is None: + serializer = self.default_serializer + self.serializer = serializer + self.is_text_serializer = is_text_serializer(serializer) + if signer is None: + signer = self.default_signer + self.signer = signer + self.signer_kwargs = signer_kwargs or {} + if fallback_signers is None: + fallback_signers = list(self.default_fallback_signers or ()) + self.fallback_signers = fallback_signers + self.serializer_kwargs = serializer_kwargs or {} + + def load_payload(self, payload, serializer=None): + """Loads the encoded object. This function raises + :class:`.BadPayload` if the payload is not valid. The + ``serializer`` parameter can be used to override the serializer + stored on the class. The encoded ``payload`` should always be + bytes. + """ + if serializer is None: + serializer = self.serializer + is_text = self.is_text_serializer + else: + is_text = is_text_serializer(serializer) + try: + if is_text: + payload = payload.decode("utf-8") + return serializer.loads(payload) + except Exception as e: + raise BadPayload( + "Could not load the payload because an exception" + " occurred on unserializing the data.", + original_error=e, + ) + + def dump_payload(self, obj): + """Dumps the encoded object. The return value is always bytes. + If the internal serializer returns text, the value will be + encoded as UTF-8. + """ + return want_bytes(self.serializer.dumps(obj, **self.serializer_kwargs)) + + def make_signer(self, salt=None): + """Creates a new instance of the signer to be used. 
The default + implementation uses the :class:`.Signer` base class. + """ + if salt is None: + salt = self.salt + return self.signer(self.secret_key, salt=salt, **self.signer_kwargs) + + def iter_unsigners(self, salt=None): + """Iterates over all signers to be tried for unsigning. Starts + with the configured signer, then constructs each signer + specified in ``fallback_signers``. + """ + if salt is None: + salt = self.salt + yield self.make_signer(salt) + for fallback in self.fallback_signers: + if type(fallback) is dict: + kwargs = fallback + fallback = self.signer + elif type(fallback) is tuple: + fallback, kwargs = fallback + else: + kwargs = self.signer_kwargs + yield fallback(self.secret_key, salt=salt, **kwargs) + + def dumps(self, obj, salt=None): + """Returns a signed string serialized with the internal + serializer. The return value can be either a byte or unicode + string depending on the format of the internal serializer. + """ + payload = want_bytes(self.dump_payload(obj)) + rv = self.make_signer(salt).sign(payload) + if self.is_text_serializer: + rv = rv.decode("utf-8") + return rv + + def dump(self, obj, f, salt=None): + """Like :meth:`dumps` but dumps into a file. The file handle has + to be compatible with what the internal serializer expects. + """ + f.write(self.dumps(obj, salt)) + + def loads(self, s, salt=None): + """Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the + signature validation fails. + """ + s = want_bytes(s) + last_exception = None + for signer in self.iter_unsigners(salt): + try: + return self.load_payload(signer.unsign(s)) + except BadSignature as err: + last_exception = err + raise last_exception + + def load(self, f, salt=None): + """Like :meth:`loads` but loads from a file.""" + return self.loads(f.read(), salt) + + def loads_unsafe(self, s, salt=None): + """Like :meth:`loads` but without verifying the signature. This + is potentially very dangerous to use depending on how your + serializer works. The return value is ``(signature_valid, + payload)`` instead of just the payload. The first item will be a + boolean that indicates if the signature is valid. This function + never fails. + + Use it for debugging only and if you know that your serializer + module is not exploitable (for example, do not use it with a + pickle serializer). + + .. versionadded:: 0.15 + """ + return self._loads_unsafe_impl(s, salt) + + def _loads_unsafe_impl(self, s, salt, load_kwargs=None, load_payload_kwargs=None): + """Low level helper function to implement :meth:`loads_unsafe` + in serializer subclasses. + """ + try: + return True, self.loads(s, salt=salt, **(load_kwargs or {})) + except BadSignature as e: + if e.payload is None: + return False, None + try: + return ( + False, + self.load_payload(e.payload, **(load_payload_kwargs or {})), + ) + except BadPayload: + return False, None + + def load_unsafe(self, f, *args, **kwargs): + """Like :meth:`loads_unsafe` but loads from a file. + + .. 
versionadded:: 0.15 + """ + return self.loads_unsafe(f.read(), *args, **kwargs) diff --git a/py3/lib/python3.6/site-packages/itsdangerous/signer.py b/py3/lib/python3.6/site-packages/itsdangerous/signer.py new file mode 100644 index 0000000..6bddc03 --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous/signer.py @@ -0,0 +1,179 @@ +import hashlib +import hmac + +from ._compat import constant_time_compare +from .encoding import _base64_alphabet +from .encoding import base64_decode +from .encoding import base64_encode +from .encoding import want_bytes +from .exc import BadSignature + + +class SigningAlgorithm(object): + """Subclasses must implement :meth:`get_signature` to provide + signature generation functionality. + """ + + def get_signature(self, key, value): + """Returns the signature for the given key and value.""" + raise NotImplementedError() + + def verify_signature(self, key, value, sig): + """Verifies the given signature matches the expected + signature. + """ + return constant_time_compare(sig, self.get_signature(key, value)) + + +class NoneAlgorithm(SigningAlgorithm): + """Provides an algorithm that does not perform any signing and + returns an empty signature. + """ + + def get_signature(self, key, value): + return b"" + + +class HMACAlgorithm(SigningAlgorithm): + """Provides signature generation using HMACs.""" + + #: The digest method to use with the MAC algorithm. This defaults to + #: SHA1, but can be changed to any other function in the hashlib + #: module. + default_digest_method = staticmethod(hashlib.sha1) + + def __init__(self, digest_method=None): + if digest_method is None: + digest_method = self.default_digest_method + self.digest_method = digest_method + + def get_signature(self, key, value): + mac = hmac.new(key, msg=value, digestmod=self.digest_method) + return mac.digest() + + +class Signer(object): + """This class can sign and unsign bytes, validating the signature + provided. + + Salt can be used to namespace the hash, so that a signed string is + only valid for a given namespace. Leaving this at the default value + or re-using a salt value across different parts of your application + where the same signed value in one part can mean something different + in another part is a security risk. + + See :ref:`the-salt` for an example of what the salt is doing and how + you can utilize it. + + .. versionadded:: 0.14 + ``key_derivation`` and ``digest_method`` were added as arguments + to the class constructor. + + .. versionadded:: 0.18 + ``algorithm`` was added as an argument to the class constructor. + """ + + #: The digest method to use for the signer. This defaults to + #: SHA1 but can be changed to any other function in the hashlib + #: module. + #: + #: .. versionadded:: 0.14 + default_digest_method = staticmethod(hashlib.sha1) + + #: Controls how the key is derived. The default is Django-style + #: concatenation. Possible values are ``concat``, ``django-concat`` + #: and ``hmac``. This is used for deriving a key from the secret key + #: with an added salt. + #: + #: .. versionadded:: 0.14 + default_key_derivation = "django-concat" + + def __init__( + self, + secret_key, + salt=None, + sep=".", + key_derivation=None, + digest_method=None, + algorithm=None, + ): + self.secret_key = want_bytes(secret_key) + self.sep = want_bytes(sep) + if self.sep in _base64_alphabet: + raise ValueError( + "The given separator cannot be used because it may be" + " contained in the signature itself. Alphanumeric" + " characters and `-_=` must not be used." 
+ ) + self.salt = "itsdangerous.Signer" if salt is None else salt + if key_derivation is None: + key_derivation = self.default_key_derivation + self.key_derivation = key_derivation + if digest_method is None: + digest_method = self.default_digest_method + self.digest_method = digest_method + if algorithm is None: + algorithm = HMACAlgorithm(self.digest_method) + self.algorithm = algorithm + + def derive_key(self): + """This method is called to derive the key. The default key + derivation choices can be overridden here. Key derivation is not + intended to be used as a security method to make a complex key + out of a short password. Instead you should use large random + secret keys. + """ + salt = want_bytes(self.salt) + if self.key_derivation == "concat": + return self.digest_method(salt + self.secret_key).digest() + elif self.key_derivation == "django-concat": + return self.digest_method(salt + b"signer" + self.secret_key).digest() + elif self.key_derivation == "hmac": + mac = hmac.new(self.secret_key, digestmod=self.digest_method) + mac.update(salt) + return mac.digest() + elif self.key_derivation == "none": + return self.secret_key + else: + raise TypeError("Unknown key derivation method") + + def get_signature(self, value): + """Returns the signature for the given value.""" + value = want_bytes(value) + key = self.derive_key() + sig = self.algorithm.get_signature(key, value) + return base64_encode(sig) + + def sign(self, value): + """Signs the given string.""" + return want_bytes(value) + want_bytes(self.sep) + self.get_signature(value) + + def verify_signature(self, value, sig): + """Verifies the signature for the given value.""" + key = self.derive_key() + try: + sig = base64_decode(sig) + except Exception: + return False + return self.algorithm.verify_signature(key, value, sig) + + def unsign(self, signed_value): + """Unsigns the given string.""" + signed_value = want_bytes(signed_value) + sep = want_bytes(self.sep) + if sep not in signed_value: + raise BadSignature("No %r found in value" % self.sep) + value, sig = signed_value.rsplit(sep, 1) + if self.verify_signature(value, sig): + return value + raise BadSignature("Signature %r does not match" % sig, payload=value) + + def validate(self, signed_value): + """Only validates the given signed value. Returns ``True`` if + the signature exists and is valid. + """ + try: + self.unsign(signed_value) + return True + except BadSignature: + return False diff --git a/py3/lib/python3.6/site-packages/itsdangerous/timed.py b/py3/lib/python3.6/site-packages/itsdangerous/timed.py new file mode 100644 index 0000000..4c117e4 --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous/timed.py @@ -0,0 +1,147 @@ +import time +from datetime import datetime + +from ._compat import text_type +from .encoding import base64_decode +from .encoding import base64_encode +from .encoding import bytes_to_int +from .encoding import int_to_bytes +from .encoding import want_bytes +from .exc import BadSignature +from .exc import BadTimeSignature +from .exc import SignatureExpired +from .serializer import Serializer +from .signer import Signer + + +class TimestampSigner(Signer): + """Works like the regular :class:`.Signer` but also records the time + of the signing and can be used to expire signatures. The + :meth:`unsign` method can raise :exc:`.SignatureExpired` if the + unsigning failed because the signature is expired. + """ + + def get_timestamp(self): + """Returns the current timestamp. The function must return an + integer. 
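Editor's note — a minimal usage sketch for the Signer class defined above, showing how the salt namespaces signatures; the key and salt strings are placeholders:

    from itsdangerous import BadSignature, Signer

    s = Signer("placeholder-secret", salt="activate-account")
    signed = s.sign("user-42")              # b"user-42.<base64 signature>"
    assert s.unsign(signed) == b"user-42"
    assert s.validate(signed) is True

    # A signer with a different salt lives in a different namespace and
    # therefore rejects the same signed value.
    other = Signer("placeholder-secret", salt="reset-password")
    try:
        other.unsign(signed)
    except BadSignature:
        pass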
+ """ + return int(time.time()) + + def timestamp_to_datetime(self, ts): + """Used to convert the timestamp from :meth:`get_timestamp` into + a datetime object. + """ + return datetime.utcfromtimestamp(ts) + + def sign(self, value): + """Signs the given string and also attaches time information.""" + value = want_bytes(value) + timestamp = base64_encode(int_to_bytes(self.get_timestamp())) + sep = want_bytes(self.sep) + value = value + sep + timestamp + return value + sep + self.get_signature(value) + + def unsign(self, value, max_age=None, return_timestamp=False): + """Works like the regular :meth:`.Signer.unsign` but can also + validate the time. See the base docstring of the class for + the general behavior. If ``return_timestamp`` is ``True`` the + timestamp of the signature will be returned as a naive + :class:`datetime.datetime` object in UTC. + """ + try: + result = Signer.unsign(self, value) + sig_error = None + except BadSignature as e: + sig_error = e + result = e.payload or b"" + sep = want_bytes(self.sep) + + # If there is no timestamp in the result there is something + # seriously wrong. In case there was a signature error, we raise + # that one directly, otherwise we have a weird situation in + # which we shouldn't have come except someone uses a time-based + # serializer on non-timestamp data, so catch that. + if sep not in result: + if sig_error: + raise sig_error + raise BadTimeSignature("timestamp missing", payload=result) + + value, timestamp = result.rsplit(sep, 1) + try: + timestamp = bytes_to_int(base64_decode(timestamp)) + except Exception: + timestamp = None + + # Signature is *not* okay. Raise a proper error now that we have + # split the value and the timestamp. + if sig_error is not None: + raise BadTimeSignature( + text_type(sig_error), payload=value, date_signed=timestamp + ) + + # Signature was okay but the timestamp is actually not there or + # malformed. Should not happen, but we handle it anyway. + if timestamp is None: + raise BadTimeSignature("Malformed timestamp", payload=value) + + # Check timestamp is not older than max_age + if max_age is not None: + age = self.get_timestamp() - timestamp + if age > max_age: + raise SignatureExpired( + "Signature age %s > %s seconds" % (age, max_age), + payload=value, + date_signed=self.timestamp_to_datetime(timestamp), + ) + + if return_timestamp: + return value, self.timestamp_to_datetime(timestamp) + return value + + def validate(self, signed_value, max_age=None): + """Only validates the given signed value. Returns ``True`` if + the signature exists and is valid.""" + try: + self.unsign(signed_value, max_age=max_age) + return True + except BadSignature: + return False + + +class TimedSerializer(Serializer): + """Uses :class:`TimestampSigner` instead of the default + :class:`.Signer`. + """ + + default_signer = TimestampSigner + + def loads(self, s, max_age=None, return_timestamp=False, salt=None): + """Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the + signature validation fails. If a ``max_age`` is provided it will + ensure the signature is not older than that time in seconds. In + case the signature is outdated, :exc:`.SignatureExpired` is + raised. All arguments are forwarded to the signer's + :meth:`~TimestampSigner.unsign` method. 
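Editor's note — a short sketch of expiry checking with the timestamped classes above; the one-hour max_age and the key are arbitrary placeholder values:

    from itsdangerous import SignatureExpired, TimedSerializer, TimestampSigner

    ts = TimestampSigner("placeholder-secret")
    token = ts.sign("session-abc")
    # Raises SignatureExpired if the token is older than max_age seconds.
    assert ts.unsign(token, max_age=3600) == b"session-abc"

    serializer = TimedSerializer("placeholder-secret")
    blob = serializer.dumps({"uid": 7})
    try:
        serializer.loads(blob, max_age=3600)
    except SignatureExpired:
        pass  # token was issued more than an hour ago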
+ """ + s = want_bytes(s) + last_exception = None + for signer in self.iter_unsigners(salt): + try: + base64d, timestamp = signer.unsign(s, max_age, return_timestamp=True) + payload = self.load_payload(base64d) + if return_timestamp: + return payload, timestamp + return payload + # If we get a signature expired it means we could read the + # signature but it's invalid. In that case we do not want to + # try the next signer. + except SignatureExpired: + raise + except BadSignature as err: + last_exception = err + raise last_exception + + def loads_unsafe(self, s, max_age=None, salt=None): + load_kwargs = {"max_age": max_age} + load_payload_kwargs = {} + return self._loads_unsafe_impl(s, salt, load_kwargs, load_payload_kwargs) diff --git a/py3/lib/python3.6/site-packages/itsdangerous/url_safe.py b/py3/lib/python3.6/site-packages/itsdangerous/url_safe.py new file mode 100644 index 0000000..fcaa011 --- /dev/null +++ b/py3/lib/python3.6/site-packages/itsdangerous/url_safe.py @@ -0,0 +1,65 @@ +import zlib + +from ._json import _CompactJSON +from .encoding import base64_decode +from .encoding import base64_encode +from .exc import BadPayload +from .serializer import Serializer +from .timed import TimedSerializer + + +class URLSafeSerializerMixin(object): + """Mixed in with a regular serializer it will attempt to zlib + compress the string to make it shorter if necessary. It will also + base64 encode the string so that it can safely be placed in a URL. + """ + + default_serializer = _CompactJSON + + def load_payload(self, payload, *args, **kwargs): + decompress = False + if payload.startswith(b"."): + payload = payload[1:] + decompress = True + try: + json = base64_decode(payload) + except Exception as e: + raise BadPayload( + "Could not base64 decode the payload because of an exception", + original_error=e, + ) + if decompress: + try: + json = zlib.decompress(json) + except Exception as e: + raise BadPayload( + "Could not zlib decompress the payload before decoding the payload", + original_error=e, + ) + return super(URLSafeSerializerMixin, self).load_payload(json, *args, **kwargs) + + def dump_payload(self, obj): + json = super(URLSafeSerializerMixin, self).dump_payload(obj) + is_compressed = False + compressed = zlib.compress(json) + if len(compressed) < (len(json) - 1): + json = compressed + is_compressed = True + base64d = base64_encode(json) + if is_compressed: + base64d = b"." + base64d + return base64d + + +class URLSafeSerializer(URLSafeSerializerMixin, Serializer): + """Works like :class:`.Serializer` but dumps and loads into a URL + safe string consisting of the upper and lowercase character of the + alphabet as well as ``'_'``, ``'-'`` and ``'.'``. + """ + + +class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer): + """Works like :class:`.TimedSerializer` but dumps and loads into a + URL safe string consisting of the upper and lowercase character of + the alphabet as well as ``'_'``, ``'-'`` and ``'.'``. + """ diff --git a/py3/lib/python3.6/site-packages/jinja2/__init__.py b/py3/lib/python3.6/site-packages/jinja2/__init__.py new file mode 100644 index 0000000..15e13b6 --- /dev/null +++ b/py3/lib/python3.6/site-packages/jinja2/__init__.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +""" + jinja2 + ~~~~~~ + + Jinja2 is a template engine written in pure Python. It provides a + Django inspired non-XML syntax but supports inline expressions and + an optional sandboxed environment. 
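Editor's note — the URL-safe serializers above are typically used for tokens embedded in links (e.g. e-mail confirmation). A hedged sketch with placeholder secret, salt and payload:

    from itsdangerous import URLSafeTimedSerializer

    s = URLSafeTimedSerializer("placeholder-secret", salt="email-confirm")
    token = s.dumps({"email": "user@example.com"})   # only URL-safe characters
    data = s.loads(token, max_age=86400)             # reject tokens older than a day
    assert data == {"email": "user@example.com"}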
+ + Nutshell + -------- + + Here a small example of a Jinja2 template:: + + {% extends 'base.html' %} + {% block title %}Memberlist{% endblock %} + {% block content %} + + {% endblock %} + + + :copyright: (c) 2017 by the Jinja Team. + :license: BSD, see LICENSE for more details. +""" +__docformat__ = 'restructuredtext en' +__version__ = '2.10.1' + +# high level interface +from jinja2.environment import Environment, Template + +# loaders +from jinja2.loaders import BaseLoader, FileSystemLoader, PackageLoader, \ + DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \ + ModuleLoader + +# bytecode caches +from jinja2.bccache import BytecodeCache, FileSystemBytecodeCache, \ + MemcachedBytecodeCache + +# undefined types +from jinja2.runtime import Undefined, DebugUndefined, StrictUndefined, \ + make_logging_undefined + +# exceptions +from jinja2.exceptions import TemplateError, UndefinedError, \ + TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \ + TemplateAssertionError, TemplateRuntimeError + +# decorators and public utilities +from jinja2.filters import environmentfilter, contextfilter, \ + evalcontextfilter +from jinja2.utils import Markup, escape, clear_caches, \ + environmentfunction, evalcontextfunction, contextfunction, \ + is_undefined, select_autoescape + +__all__ = [ + 'Environment', 'Template', 'BaseLoader', 'FileSystemLoader', + 'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader', + 'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache', + 'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined', + 'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound', + 'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError', + 'TemplateRuntimeError', + 'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape', + 'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined', + 'evalcontextfilter', 'evalcontextfunction', 'make_logging_undefined', + 'select_autoescape', +] + + +def _patch_async(): + from jinja2.utils import have_async_gen + if have_async_gen: + from jinja2.asyncsupport import patch_all + patch_all() + + +_patch_async() +del _patch_async diff --git a/py3/lib/python3.6/site-packages/jinja2/_compat.py b/py3/lib/python3.6/site-packages/jinja2/_compat.py new file mode 100644 index 0000000..61d8530 --- /dev/null +++ b/py3/lib/python3.6/site-packages/jinja2/_compat.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +""" + jinja2._compat + ~~~~~~~~~~~~~~ + + Some py2/py3 compatibility support based on a stripped down + version of six so we don't have to depend on a specific version + of it. + + :copyright: Copyright 2013 by the Jinja team, see AUTHORS. + :license: BSD, see LICENSE for details. 
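Editor's note — a quick usage sketch for the public API re-exported by this __init__ module; the template name and variables are invented for illustration:

    from jinja2 import DictLoader, Environment, select_autoescape

    env = Environment(
        loader=DictLoader({"hello.html": "Hello {{ name }}!"}),
        autoescape=select_autoescape(["html", "xml"]),
    )
    assert env.get_template("hello.html").render(name="World") == "Hello World!"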
+""" +import sys + +PY2 = sys.version_info[0] == 2 +PYPY = hasattr(sys, 'pypy_translation_info') +_identity = lambda x: x + + +if not PY2: + unichr = chr + range_type = range + text_type = str + string_types = (str,) + integer_types = (int,) + + iterkeys = lambda d: iter(d.keys()) + itervalues = lambda d: iter(d.values()) + iteritems = lambda d: iter(d.items()) + + import pickle + from io import BytesIO, StringIO + NativeStringIO = StringIO + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + ifilter = filter + imap = map + izip = zip + intern = sys.intern + + implements_iterator = _identity + implements_to_string = _identity + encode_filename = _identity + +else: + unichr = unichr + text_type = unicode + range_type = xrange + string_types = (str, unicode) + integer_types = (int, long) + + iterkeys = lambda d: d.iterkeys() + itervalues = lambda d: d.itervalues() + iteritems = lambda d: d.iteritems() + + import cPickle as pickle + from cStringIO import StringIO as BytesIO, StringIO + NativeStringIO = BytesIO + + exec('def reraise(tp, value, tb=None):\n raise tp, value, tb') + + from itertools import imap, izip, ifilter + intern = intern + + def implements_iterator(cls): + cls.next = cls.__next__ + del cls.__next__ + return cls + + def implements_to_string(cls): + cls.__unicode__ = cls.__str__ + cls.__str__ = lambda x: x.__unicode__().encode('utf-8') + return cls + + def encode_filename(filename): + if isinstance(filename, unicode): + return filename.encode('utf-8') + return filename + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a + # dummy metaclass for one level of class instantiation that replaces + # itself with the actual metaclass. 
+ class metaclass(type): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +try: + from urllib.parse import quote_from_bytes as url_quote +except ImportError: + from urllib import quote as url_quote diff --git a/py3/lib/python3.6/site-packages/jinja2/_identifier.py b/py3/lib/python3.6/site-packages/jinja2/_identifier.py new file mode 100644 index 0000000..2eac35d --- /dev/null +++ b/py3/lib/python3.6/site-packages/jinja2/_identifier.py @@ -0,0 +1,2 @@ +# generated by scripts/generate_identifier_pattern.py +pattern = '·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯' diff --git a/py3/lib/python3.6/site-packages/jinja2/asyncfilters.py b/py3/lib/python3.6/site-packages/jinja2/asyncfilters.py new file mode 100644 index 0000000..5c1f46d --- /dev/null +++ b/py3/lib/python3.6/site-packages/jinja2/asyncfilters.py @@ -0,0 +1,146 @@ +from functools import wraps + +from jinja2.asyncsupport import auto_aiter +from jinja2 import filters + + +async def auto_to_seq(value): + seq = [] + if hasattr(value, '__aiter__'): + async for item in value: + seq.append(item) + else: + for item in value: + seq.append(item) + return seq + + +async def async_select_or_reject(args, kwargs, modfunc, lookup_attr): + seq, func = filters.prepare_select_or_reject( + args, kwargs, modfunc, lookup_attr) + if seq: + async for item in auto_aiter(seq): + if func(item): + yield item + + +def dualfilter(normal_filter, async_filter): + wrap_evalctx = False + if getattr(normal_filter, 'environmentfilter', False): + is_async = lambda args: args[0].is_async + wrap_evalctx = False + else: + if not getattr(normal_filter, 'evalcontextfilter', False) and \ + not getattr(normal_filter, 'contextfilter', False): + wrap_evalctx = True + is_async = lambda args: args[0].environment.is_async + + @wraps(normal_filter) + def wrapper(*args, **kwargs): + b = is_async(args) + if wrap_evalctx: + args = args[1:] + if b: + return async_filter(*args, **kwargs) + return normal_filter(*args, **kwargs) + + if wrap_evalctx: + wrapper.evalcontextfilter = True + + wrapper.asyncfiltervariant = True + + return wrapper + + +def asyncfiltervariant(original): + def decorator(f): + return dualfilter(original, f) + return decorator + + +@asyncfiltervariant(filters.do_first) +async def do_first(environment, seq): + try: + return await auto_aiter(seq).__anext__() + except StopAsyncIteration: + return environment.undefined('No first item, sequence was empty.') + + +@asyncfiltervariant(filters.do_groupby) +async def do_groupby(environment, value, attribute): + expr = filters.make_attrgetter(environment, attribute) + return [filters._GroupTuple(key, await auto_to_seq(values)) + for key, values in filters.groupby(sorted( + await auto_to_seq(value), key=expr), expr)] + + +@asyncfiltervariant(filters.do_join) +async def do_join(eval_ctx, value, d=u'', attribute=None): + return filters.do_join(eval_ctx, await 
auto_to_seq(value), d, attribute) + + +@asyncfiltervariant(filters.do_list) +async def do_list(value): + return await auto_to_seq(value) + + +@asyncfiltervariant(filters.do_reject) +async def do_reject(*args, **kwargs): + return async_select_or_reject(args, kwargs, lambda x: not x, False) + + +@asyncfiltervariant(filters.do_rejectattr) +async def do_rejectattr(*args, **kwargs): + return async_select_or_reject(args, kwargs, lambda x: not x, True) + + +@asyncfiltervariant(filters.do_select) +async def do_select(*args, **kwargs): + return async_select_or_reject(args, kwargs, lambda x: x, False) + + +@asyncfiltervariant(filters.do_selectattr) +async def do_selectattr(*args, **kwargs): + return async_select_or_reject(args, kwargs, lambda x: x, True) + + +@asyncfiltervariant(filters.do_map) +async def do_map(*args, **kwargs): + seq, func = filters.prepare_map(args, kwargs) + if seq: + async for item in auto_aiter(seq): + yield func(item) + + +@asyncfiltervariant(filters.do_sum) +async def do_sum(environment, iterable, attribute=None, start=0): + rv = start + if attribute is not None: + func = filters.make_attrgetter(environment, attribute) + else: + func = lambda x: x + async for item in auto_aiter(iterable): + rv += func(item) + return rv + + +@asyncfiltervariant(filters.do_slice) +async def do_slice(value, slices, fill_with=None): + return filters.do_slice(await auto_to_seq(value), slices, fill_with) + + +ASYNC_FILTERS = { + 'first': do_first, + 'groupby': do_groupby, + 'join': do_join, + 'list': do_list, + # we intentionally do not support do_last because that would be + # ridiculous + 'reject': do_reject, + 'rejectattr': do_rejectattr, + 'map': do_map, + 'select': do_select, + 'selectattr': do_selectattr, + 'sum': do_sum, + 'slice': do_slice, +} diff --git a/py3/lib/python3.6/site-packages/jinja2/asyncsupport.py b/py3/lib/python3.6/site-packages/jinja2/asyncsupport.py new file mode 100644 index 0000000..b1e7b5c --- /dev/null +++ b/py3/lib/python3.6/site-packages/jinja2/asyncsupport.py @@ -0,0 +1,256 @@ +# -*- coding: utf-8 -*- +""" + jinja2.asyncsupport + ~~~~~~~~~~~~~~~~~~~ + + Has all the code for async support which is implemented as a patch + for supported Python versions. + + :copyright: (c) 2017 by the Jinja Team. + :license: BSD, see LICENSE for more details. 
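Editor's note — a hedged sketch of the async mode these modules patch in: with enable_async=True a template can be rendered via render_async() and can iterate over async generators. Python 3.6+ and an event loop are assumed; the data is invented:

    import asyncio
    from jinja2 import Environment

    async def fetch_users():
        for name in ("alice", "bob"):       # stand-in for real async I/O
            yield name

    env = Environment(enable_async=True)
    template = env.from_string("{% for u in users %}{{ u }} {% endfor %}")

    async def main():
        return await template.render_async(users=fetch_users())

    loop = asyncio.get_event_loop()
    assert loop.run_until_complete(main()) == "alice bob "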
+""" +import sys +import asyncio +import inspect +from functools import update_wrapper + +from jinja2.utils import concat, internalcode, Markup +from jinja2.environment import TemplateModule +from jinja2.runtime import LoopContextBase, _last_iteration + + +async def concat_async(async_gen): + rv = [] + async def collect(): + async for event in async_gen: + rv.append(event) + await collect() + return concat(rv) + + +async def generate_async(self, *args, **kwargs): + vars = dict(*args, **kwargs) + try: + async for event in self.root_render_func(self.new_context(vars)): + yield event + except Exception: + exc_info = sys.exc_info() + else: + return + yield self.environment.handle_exception(exc_info, True) + + +def wrap_generate_func(original_generate): + def _convert_generator(self, loop, args, kwargs): + async_gen = self.generate_async(*args, **kwargs) + try: + while 1: + yield loop.run_until_complete(async_gen.__anext__()) + except StopAsyncIteration: + pass + def generate(self, *args, **kwargs): + if not self.environment.is_async: + return original_generate(self, *args, **kwargs) + return _convert_generator(self, asyncio.get_event_loop(), args, kwargs) + return update_wrapper(generate, original_generate) + + +async def render_async(self, *args, **kwargs): + if not self.environment.is_async: + raise RuntimeError('The environment was not created with async mode ' + 'enabled.') + + vars = dict(*args, **kwargs) + ctx = self.new_context(vars) + + try: + return await concat_async(self.root_render_func(ctx)) + except Exception: + exc_info = sys.exc_info() + return self.environment.handle_exception(exc_info, True) + + +def wrap_render_func(original_render): + def render(self, *args, **kwargs): + if not self.environment.is_async: + return original_render(self, *args, **kwargs) + loop = asyncio.get_event_loop() + return loop.run_until_complete(self.render_async(*args, **kwargs)) + return update_wrapper(render, original_render) + + +def wrap_block_reference_call(original_call): + @internalcode + async def async_call(self): + rv = await concat_async(self._stack[self._depth](self._context)) + if self._context.eval_ctx.autoescape: + rv = Markup(rv) + return rv + + @internalcode + def __call__(self): + if not self._context.environment.is_async: + return original_call(self) + return async_call(self) + + return update_wrapper(__call__, original_call) + + +def wrap_macro_invoke(original_invoke): + @internalcode + async def async_invoke(self, arguments, autoescape): + rv = await self._func(*arguments) + if autoescape: + rv = Markup(rv) + return rv + + @internalcode + def _invoke(self, arguments, autoescape): + if not self._environment.is_async: + return original_invoke(self, arguments, autoescape) + return async_invoke(self, arguments, autoescape) + return update_wrapper(_invoke, original_invoke) + + +@internalcode +async def get_default_module_async(self): + if self._module is not None: + return self._module + self._module = rv = await self.make_module_async() + return rv + + +def wrap_default_module(original_default_module): + @internalcode + def _get_default_module(self): + if self.environment.is_async: + raise RuntimeError('Template module attribute is unavailable ' + 'in async mode') + return original_default_module(self) + return _get_default_module + + +async def make_module_async(self, vars=None, shared=False, locals=None): + context = self.new_context(vars, shared, locals) + body_stream = [] + async for item in self.root_render_func(context): + body_stream.append(item) + return TemplateModule(self, 
context, body_stream) + + +def patch_template(): + from jinja2 import Template + Template.generate = wrap_generate_func(Template.generate) + Template.generate_async = update_wrapper( + generate_async, Template.generate_async) + Template.render_async = update_wrapper( + render_async, Template.render_async) + Template.render = wrap_render_func(Template.render) + Template._get_default_module = wrap_default_module( + Template._get_default_module) + Template._get_default_module_async = get_default_module_async + Template.make_module_async = update_wrapper( + make_module_async, Template.make_module_async) + + +def patch_runtime(): + from jinja2.runtime import BlockReference, Macro + BlockReference.__call__ = wrap_block_reference_call( + BlockReference.__call__) + Macro._invoke = wrap_macro_invoke(Macro._invoke) + + +def patch_filters(): + from jinja2.filters import FILTERS + from jinja2.asyncfilters import ASYNC_FILTERS + FILTERS.update(ASYNC_FILTERS) + + +def patch_all(): + patch_template() + patch_runtime() + patch_filters() + + +async def auto_await(value): + if inspect.isawaitable(value): + return await value + return value + + +async def auto_aiter(iterable): + if hasattr(iterable, '__aiter__'): + async for item in iterable: + yield item + return + for item in iterable: + yield item + + +class AsyncLoopContext(LoopContextBase): + + def __init__(self, async_iterator, undefined, after, length, recurse=None, + depth0=0): + LoopContextBase.__init__(self, undefined, recurse, depth0) + self._async_iterator = async_iterator + self._after = after + self._length = length + + @property + def length(self): + if self._length is None: + raise TypeError('Loop length for some iterators cannot be ' + 'lazily calculated in async mode') + return self._length + + def __aiter__(self): + return AsyncLoopContextIterator(self) + + +class AsyncLoopContextIterator(object): + __slots__ = ('context',) + + def __init__(self, context): + self.context = context + + def __aiter__(self): + return self + + async def __anext__(self): + ctx = self.context + ctx.index0 += 1 + if ctx._after is _last_iteration: + raise StopAsyncIteration() + ctx._before = ctx._current + ctx._current = ctx._after + try: + ctx._after = await ctx._async_iterator.__anext__() + except StopAsyncIteration: + ctx._after = _last_iteration + return ctx._current, ctx + + +async def make_async_loop_context(iterable, undefined, recurse=None, depth0=0): + # Length is more complicated and less efficient in async mode. The + # reason for this is that we cannot know if length will be used + # upfront but because length is a property we cannot lazily execute it + # later. This means that we need to buffer it up and measure :( + # + # We however only do this for actual iterators, not for async + # iterators as blocking here does not seem like the best idea in the + # world. 
+ try: + length = len(iterable) + except (TypeError, AttributeError): + if not hasattr(iterable, '__aiter__'): + iterable = tuple(iterable) + length = len(iterable) + else: + length = None + async_iterator = auto_aiter(iterable) + try: + after = await async_iterator.__anext__() + except StopAsyncIteration: + after = _last_iteration + return AsyncLoopContext(async_iterator, undefined, after, length, recurse, + depth0) diff --git a/py3/lib/python3.6/site-packages/jinja2/bccache.py b/py3/lib/python3.6/site-packages/jinja2/bccache.py new file mode 100644 index 0000000..080e527 --- /dev/null +++ b/py3/lib/python3.6/site-packages/jinja2/bccache.py @@ -0,0 +1,362 @@ +# -*- coding: utf-8 -*- +""" + jinja2.bccache + ~~~~~~~~~~~~~~ + + This module implements the bytecode cache system Jinja is optionally + using. This is useful if you have very complex template situations and + the compiliation of all those templates slow down your application too + much. + + Situations where this is useful are often forking web applications that + are initialized on the first request. + + :copyright: (c) 2017 by the Jinja Team. + :license: BSD. +""" +from os import path, listdir +import os +import sys +import stat +import errno +import marshal +import tempfile +import fnmatch +from hashlib import sha1 +from jinja2.utils import open_if_exists +from jinja2._compat import BytesIO, pickle, PY2, text_type + + +# marshal works better on 3.x, one hack less required +if not PY2: + marshal_dump = marshal.dump + marshal_load = marshal.load +else: + + def marshal_dump(code, f): + if isinstance(f, file): + marshal.dump(code, f) + else: + f.write(marshal.dumps(code)) + + def marshal_load(f): + if isinstance(f, file): + return marshal.load(f) + return marshal.loads(f.read()) + + +bc_version = 3 + +# magic version used to only change with new jinja versions. With 2.6 +# we change this to also take Python version changes into account. The +# reason for this is that Python tends to segfault if fed earlier bytecode +# versions because someone thought it would be a good idea to reuse opcodes +# or make Python incompatible with earlier versions. +bc_magic = 'j2'.encode('ascii') + \ + pickle.dumps(bc_version, 2) + \ + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1]) + + +class Bucket(object): + """Buckets are used to store the bytecode for one template. It's created + and initialized by the bytecode cache and passed to the loading functions. + + The buckets get an internal checksum from the cache assigned and use this + to automatically reject outdated cache material. Individual bytecode + cache subclasses don't have to care about cache invalidation. 
+ """ + + def __init__(self, environment, key, checksum): + self.environment = environment + self.key = key + self.checksum = checksum + self.reset() + + def reset(self): + """Resets the bucket (unloads the bytecode).""" + self.code = None + + def load_bytecode(self, f): + """Loads bytecode from a file or file like object.""" + # make sure the magic header is correct + magic = f.read(len(bc_magic)) + if magic != bc_magic: + self.reset() + return + # the source code of the file changed, we need to reload + checksum = pickle.load(f) + if self.checksum != checksum: + self.reset() + return + # if marshal_load fails then we need to reload + try: + self.code = marshal_load(f) + except (EOFError, ValueError, TypeError): + self.reset() + return + + def write_bytecode(self, f): + """Dump the bytecode into the file or file like object passed.""" + if self.code is None: + raise TypeError('can\'t write empty bucket') + f.write(bc_magic) + pickle.dump(self.checksum, f, 2) + marshal_dump(self.code, f) + + def bytecode_from_string(self, string): + """Load bytecode from a string.""" + self.load_bytecode(BytesIO(string)) + + def bytecode_to_string(self): + """Return the bytecode as string.""" + out = BytesIO() + self.write_bytecode(out) + return out.getvalue() + + +class BytecodeCache(object): + """To implement your own bytecode cache you have to subclass this class + and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of + these methods are passed a :class:`~jinja2.bccache.Bucket`. + + A very basic bytecode cache that saves the bytecode on the file system:: + + from os import path + + class MyCache(BytecodeCache): + + def __init__(self, directory): + self.directory = directory + + def load_bytecode(self, bucket): + filename = path.join(self.directory, bucket.key) + if path.exists(filename): + with open(filename, 'rb') as f: + bucket.load_bytecode(f) + + def dump_bytecode(self, bucket): + filename = path.join(self.directory, bucket.key) + with open(filename, 'wb') as f: + bucket.write_bytecode(f) + + A more advanced version of a filesystem based bytecode cache is part of + Jinja2. + """ + + def load_bytecode(self, bucket): + """Subclasses have to override this method to load bytecode into a + bucket. If they are not able to find code in the cache for the + bucket, it must not do anything. + """ + raise NotImplementedError() + + def dump_bytecode(self, bucket): + """Subclasses have to override this method to write the bytecode + from a bucket back to the cache. If it unable to do so it must not + fail silently but raise an exception. + """ + raise NotImplementedError() + + def clear(self): + """Clears the cache. This method is not used by Jinja2 but should be + implemented to allow applications to clear the bytecode cache used + by a particular environment. + """ + + def get_cache_key(self, name, filename=None): + """Returns the unique hash key for this template name.""" + hash = sha1(name.encode('utf-8')) + if filename is not None: + filename = '|' + filename + if isinstance(filename, text_type): + filename = filename.encode('utf-8') + hash.update(filename) + return hash.hexdigest() + + def get_source_checksum(self, source): + """Returns a checksum for the source.""" + return sha1(source.encode('utf-8')).hexdigest() + + def get_bucket(self, environment, name, filename, source): + """Return a cache bucket for the given template. All arguments are + mandatory but filename may be `None`. 
+ """ + key = self.get_cache_key(name, filename) + checksum = self.get_source_checksum(source) + bucket = Bucket(environment, key, checksum) + self.load_bytecode(bucket) + return bucket + + def set_bucket(self, bucket): + """Put the bucket into the cache.""" + self.dump_bytecode(bucket) + + +class FileSystemBytecodeCache(BytecodeCache): + """A bytecode cache that stores bytecode on the filesystem. It accepts + two arguments: The directory where the cache items are stored and a + pattern string that is used to build the filename. + + If no directory is specified a default cache directory is selected. On + Windows the user's temp directory is used, on UNIX systems a directory + is created for the user in the system temp directory. + + The pattern can be used to have multiple separate caches operate on the + same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` + is replaced with the cache key. + + >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') + + This bytecode cache supports clearing of the cache using the clear method. + """ + + def __init__(self, directory=None, pattern='__jinja2_%s.cache'): + if directory is None: + directory = self._get_default_cache_dir() + self.directory = directory + self.pattern = pattern + + def _get_default_cache_dir(self): + def _unsafe_dir(): + raise RuntimeError('Cannot determine safe temp directory. You ' + 'need to explicitly provide one.') + + tmpdir = tempfile.gettempdir() + + # On windows the temporary directory is used specific unless + # explicitly forced otherwise. We can just use that. + if os.name == 'nt': + return tmpdir + if not hasattr(os, 'getuid'): + _unsafe_dir() + + dirname = '_jinja2-cache-%d' % os.getuid() + actual_dir = os.path.join(tmpdir, dirname) + + try: + os.mkdir(actual_dir, stat.S_IRWXU) + except OSError as e: + if e.errno != errno.EEXIST: + raise + try: + os.chmod(actual_dir, stat.S_IRWXU) + actual_dir_stat = os.lstat(actual_dir) + if actual_dir_stat.st_uid != os.getuid() \ + or not stat.S_ISDIR(actual_dir_stat.st_mode) \ + or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU: + _unsafe_dir() + except OSError as e: + if e.errno != errno.EEXIST: + raise + + actual_dir_stat = os.lstat(actual_dir) + if actual_dir_stat.st_uid != os.getuid() \ + or not stat.S_ISDIR(actual_dir_stat.st_mode) \ + or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU: + _unsafe_dir() + + return actual_dir + + def _get_cache_filename(self, bucket): + return path.join(self.directory, self.pattern % bucket.key) + + def load_bytecode(self, bucket): + f = open_if_exists(self._get_cache_filename(bucket), 'rb') + if f is not None: + try: + bucket.load_bytecode(f) + finally: + f.close() + + def dump_bytecode(self, bucket): + f = open(self._get_cache_filename(bucket), 'wb') + try: + bucket.write_bytecode(f) + finally: + f.close() + + def clear(self): + # imported lazily here because google app-engine doesn't support + # write access on the file system and the function does not exist + # normally. + from os import remove + files = fnmatch.filter(listdir(self.directory), self.pattern % '*') + for filename in files: + try: + remove(path.join(self.directory, filename)) + except OSError: + pass + + +class MemcachedBytecodeCache(BytecodeCache): + """This class implements a bytecode cache that uses a memcache cache for + storing the information. It does not enforce a specific memcache library + (tummy's memcache or cmemcache) but will accept any class that provides + the minimal interface required. 
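Editor's note — a sketch of wiring the filesystem bytecode cache above into an environment so compiled templates survive process restarts; the directory and template names are placeholders:

    import os
    from jinja2 import Environment, FileSystemBytecodeCache, FileSystemLoader

    os.makedirs("/tmp/jinja_cache", exist_ok=True)
    env = Environment(
        loader=FileSystemLoader("templates"),
        bytecode_cache=FileSystemBytecodeCache("/tmp/jinja_cache", "%s.cache"),
    )
    # The first get_template() compiles and writes the cache; later processes
    # load the marshalled code object instead of recompiling the source.
    env.get_template("hello.html")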
+ + Libraries compatible with this class: + + - `werkzeug `_.contrib.cache + - `python-memcached `_ + - `cmemcache `_ + + (Unfortunately the django cache interface is not compatible because it + does not support storing binary data, only unicode. You can however pass + the underlying cache client to the bytecode cache which is available + as `django.core.cache.cache._client`.) + + The minimal interface for the client passed to the constructor is this: + + .. class:: MinimalClientInterface + + .. method:: set(key, value[, timeout]) + + Stores the bytecode in the cache. `value` is a string and + `timeout` the timeout of the key. If timeout is not provided + a default timeout or no timeout should be assumed, if it's + provided it's an integer with the number of seconds the cache + item should exist. + + .. method:: get(key) + + Returns the value for the cache key. If the item does not + exist in the cache the return value must be `None`. + + The other arguments to the constructor are the prefix for all keys that + is added before the actual cache key and the timeout for the bytecode in + the cache system. We recommend a high (or no) timeout. + + This bytecode cache does not support clearing of used items in the cache. + The clear method is a no-operation function. + + .. versionadded:: 2.7 + Added support for ignoring memcache errors through the + `ignore_memcache_errors` parameter. + """ + + def __init__(self, client, prefix='jinja2/bytecode/', timeout=None, + ignore_memcache_errors=True): + self.client = client + self.prefix = prefix + self.timeout = timeout + self.ignore_memcache_errors = ignore_memcache_errors + + def load_bytecode(self, bucket): + try: + code = self.client.get(self.prefix + bucket.key) + except Exception: + if not self.ignore_memcache_errors: + raise + code = None + if code is not None: + bucket.bytecode_from_string(code) + + def dump_bytecode(self, bucket): + args = (self.prefix + bucket.key, bucket.bytecode_to_string()) + if self.timeout is not None: + args += (self.timeout,) + try: + self.client.set(*args) + except Exception: + if not self.ignore_memcache_errors: + raise diff --git a/py3/lib/python3.6/site-packages/jinja2/compiler.py b/py3/lib/python3.6/site-packages/jinja2/compiler.py new file mode 100644 index 0000000..d534a82 --- /dev/null +++ b/py3/lib/python3.6/site-packages/jinja2/compiler.py @@ -0,0 +1,1721 @@ +# -*- coding: utf-8 -*- +""" + jinja2.compiler + ~~~~~~~~~~~~~~~ + + Compiles nodes into python code. + + :copyright: (c) 2017 by the Jinja Team. + :license: BSD, see LICENSE for more details. +""" +from itertools import chain +from copy import deepcopy +from keyword import iskeyword as is_python_keyword +from functools import update_wrapper +from jinja2 import nodes +from jinja2.nodes import EvalContext +from jinja2.visitor import NodeVisitor +from jinja2.optimizer import Optimizer +from jinja2.exceptions import TemplateAssertionError +from jinja2.utils import Markup, concat, escape +from jinja2._compat import range_type, text_type, string_types, \ + iteritems, NativeStringIO, imap, izip +from jinja2.idtracking import Symbols, VAR_LOAD_PARAMETER, \ + VAR_LOAD_RESOLVE, VAR_LOAD_ALIAS, VAR_LOAD_UNDEFINED + + +operators = { + 'eq': '==', + 'ne': '!=', + 'gt': '>', + 'gteq': '>=', + 'lt': '<', + 'lteq': '<=', + 'in': 'in', + 'notin': 'not in' +} + +# what method to iterate over items do we want to use for dict iteration +# in generated code? 
on 2.x let's go with iteritems, on 3.x with items +if hasattr(dict, 'iteritems'): + dict_item_iter = 'iteritems' +else: + dict_item_iter = 'items' + +code_features = ['division'] + +# does this python version support generator stops? (PEP 0479) +try: + exec('from __future__ import generator_stop') + code_features.append('generator_stop') +except SyntaxError: + pass + +# does this python version support yield from? +try: + exec('def f(): yield from x()') +except SyntaxError: + supports_yield_from = False +else: + supports_yield_from = True + + +def optimizeconst(f): + def new_func(self, node, frame, **kwargs): + # Only optimize if the frame is not volatile + if self.optimized and not frame.eval_ctx.volatile: + new_node = self.optimizer.visit(node, frame.eval_ctx) + if new_node != node: + return self.visit(new_node, frame) + return f(self, node, frame, **kwargs) + return update_wrapper(new_func, f) + + +def generate(node, environment, name, filename, stream=None, + defer_init=False, optimized=True): + """Generate the python source for a node tree.""" + if not isinstance(node, nodes.Template): + raise TypeError('Can\'t compile non template nodes') + generator = environment.code_generator_class(environment, name, filename, + stream, defer_init, + optimized) + generator.visit(node) + if stream is None: + return generator.stream.getvalue() + + +def has_safe_repr(value): + """Does the node have a safe representation?""" + if value is None or value is NotImplemented or value is Ellipsis: + return True + if type(value) in (bool, int, float, complex, range_type, Markup) + string_types: + return True + if type(value) in (tuple, list, set, frozenset): + for item in value: + if not has_safe_repr(item): + return False + return True + elif type(value) is dict: + for key, value in iteritems(value): + if not has_safe_repr(key): + return False + if not has_safe_repr(value): + return False + return True + return False + + +def find_undeclared(nodes, names): + """Check if the names passed are accessed undeclared. The return value + is a set of all the undeclared names from the sequence of names found. + """ + visitor = UndeclaredNameVisitor(names) + try: + for node in nodes: + visitor.visit(node) + except VisitorExit: + pass + return visitor.undeclared + + +class MacroRef(object): + + def __init__(self, node): + self.node = node + self.accesses_caller = False + self.accesses_kwargs = False + self.accesses_varargs = False + + +class Frame(object): + """Holds compile time information for us.""" + + def __init__(self, eval_ctx, parent=None, level=None): + self.eval_ctx = eval_ctx + self.symbols = Symbols(parent and parent.symbols or None, + level=level) + + # a toplevel frame is the root + soft frames such as if conditions. + self.toplevel = False + + # the root frame is basically just the outermost frame, so no if + # conditions. This information is used to optimize inheritance + # situations. + self.rootlevel = False + + # in some dynamic inheritance situations the compiler needs to add + # write tests around output statements. + self.require_output_check = parent and parent.require_output_check + + # inside some tags we are using a buffer rather than yield statements. + # this for example affects {% filter %} or {% macro %}. If a frame + # is buffered this variable points to the name of the list used as + # buffer. + self.buffer = None + + # the name of the block we're in, otherwise None. 
+ self.block = parent and parent.block or None + + # the parent of this frame + self.parent = parent + + if parent is not None: + self.buffer = parent.buffer + + def copy(self): + """Create a copy of the current one.""" + rv = object.__new__(self.__class__) + rv.__dict__.update(self.__dict__) + rv.symbols = self.symbols.copy() + return rv + + def inner(self, isolated=False): + """Return an inner frame.""" + if isolated: + return Frame(self.eval_ctx, level=self.symbols.level + 1) + return Frame(self.eval_ctx, self) + + def soft(self): + """Return a soft frame. A soft frame may not be modified as + standalone thing as it shares the resources with the frame it + was created of, but it's not a rootlevel frame any longer. + + This is only used to implement if-statements. + """ + rv = self.copy() + rv.rootlevel = False + return rv + + __copy__ = copy + + +class VisitorExit(RuntimeError): + """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" + + +class DependencyFinderVisitor(NodeVisitor): + """A visitor that collects filter and test calls.""" + + def __init__(self): + self.filters = set() + self.tests = set() + + def visit_Filter(self, node): + self.generic_visit(node) + self.filters.add(node.name) + + def visit_Test(self, node): + self.generic_visit(node) + self.tests.add(node.name) + + def visit_Block(self, node): + """Stop visiting at blocks.""" + + +class UndeclaredNameVisitor(NodeVisitor): + """A visitor that checks if a name is accessed without being + declared. This is different from the frame visitor as it will + not stop at closure frames. + """ + + def __init__(self, names): + self.names = set(names) + self.undeclared = set() + + def visit_Name(self, node): + if node.ctx == 'load' and node.name in self.names: + self.undeclared.add(node.name) + if self.undeclared == self.names: + raise VisitorExit() + else: + self.names.discard(node.name) + + def visit_Block(self, node): + """Stop visiting a blocks.""" + + +class CompilerExit(Exception): + """Raised if the compiler encountered a situation where it just + doesn't make sense to further process the code. Any block that + raises such an exception is not further processed. + """ + + +class CodeGenerator(NodeVisitor): + + def __init__(self, environment, name, filename, stream=None, + defer_init=False, optimized=True): + if stream is None: + stream = NativeStringIO() + self.environment = environment + self.name = name + self.filename = filename + self.stream = stream + self.created_block_context = False + self.defer_init = defer_init + self.optimized = optimized + if optimized: + self.optimizer = Optimizer(environment) + + # aliases for imports + self.import_aliases = {} + + # a registry for all blocks. Because blocks are moved out + # into the global python scope they are registered here + self.blocks = {} + + # the number of extends statements so far + self.extends_so_far = 0 + + # some templates have a rootlevel extends. In this case we + # can safely assume that we're a child template and do some + # more optimizations. + self.has_known_extends = False + + # the current line number + self.code_lineno = 1 + + # registry of all filters and tests (global, not block local) + self.tests = {} + self.filters = {} + + # the debug information + self.debug_info = [] + self._write_debug_info = None + + # the number of new lines before the next write() + self._new_lines = 0 + + # the line number of the last written statement + self._last_line = 0 + + # true if nothing was written so far. 
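Editor's note — to see the code generator above in action, Environment.compile() can return the generated Python source instead of a code object; the template string is illustrative:

    from jinja2 import Environment

    env = Environment()
    source = env.compile("Hello {{ name }}!", name="hello", raw=True)
    # `source` is the Python module text produced by this CodeGenerator via
    # generate(); printing it shows the root render function and blocks.
    print(source)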
+ self._first_write = True + + # used by the `temporary_identifier` method to get new + # unique, temporary identifier + self._last_identifier = 0 + + # the current indentation + self._indentation = 0 + + # Tracks toplevel assignments + self._assign_stack = [] + + # Tracks parameter definition blocks + self._param_def_block = [] + + # Tracks the current context. + self._context_reference_stack = ['context'] + + # -- Various compilation helpers + + def fail(self, msg, lineno): + """Fail with a :exc:`TemplateAssertionError`.""" + raise TemplateAssertionError(msg, lineno, self.name, self.filename) + + def temporary_identifier(self): + """Get a new unique identifier.""" + self._last_identifier += 1 + return 't_%d' % self._last_identifier + + def buffer(self, frame): + """Enable buffering for the frame from that point onwards.""" + frame.buffer = self.temporary_identifier() + self.writeline('%s = []' % frame.buffer) + + def return_buffer_contents(self, frame, force_unescaped=False): + """Return the buffer contents of the frame.""" + if not force_unescaped: + if frame.eval_ctx.volatile: + self.writeline('if context.eval_ctx.autoescape:') + self.indent() + self.writeline('return Markup(concat(%s))' % frame.buffer) + self.outdent() + self.writeline('else:') + self.indent() + self.writeline('return concat(%s)' % frame.buffer) + self.outdent() + return + elif frame.eval_ctx.autoescape: + self.writeline('return Markup(concat(%s))' % frame.buffer) + return + self.writeline('return concat(%s)' % frame.buffer) + + def indent(self): + """Indent by one.""" + self._indentation += 1 + + def outdent(self, step=1): + """Outdent by step.""" + self._indentation -= step + + def start_write(self, frame, node=None): + """Yield or write into the frame buffer.""" + if frame.buffer is None: + self.writeline('yield ', node) + else: + self.writeline('%s.append(' % frame.buffer, node) + + def end_write(self, frame): + """End the writing process started by `start_write`.""" + if frame.buffer is not None: + self.write(')') + + def simple_write(self, s, frame, node=None): + """Simple shortcut for start_write + write + end_write.""" + self.start_write(frame, node) + self.write(s) + self.end_write(frame) + + def blockvisit(self, nodes, frame): + """Visit a list of nodes as block in a frame. If the current frame + is no buffer a dummy ``if 0: yield None`` is written automatically. + """ + try: + self.writeline('pass') + for node in nodes: + self.visit(node, frame) + except CompilerExit: + pass + + def write(self, x): + """Write a string into the output stream.""" + if self._new_lines: + if not self._first_write: + self.stream.write('\n' * self._new_lines) + self.code_lineno += self._new_lines + if self._write_debug_info is not None: + self.debug_info.append((self._write_debug_info, + self.code_lineno)) + self._write_debug_info = None + self._first_write = False + self.stream.write(' ' * self._indentation) + self._new_lines = 0 + self.stream.write(x) + + def writeline(self, x, node=None, extra=0): + """Combination of newline and write.""" + self.newline(node, extra) + self.write(x) + + def newline(self, node=None, extra=0): + """Add one or more newlines before the next write.""" + self._new_lines = max(self._new_lines, 1 + extra) + if node is not None and node.lineno != self._last_line: + self._write_debug_info = node.lineno + self._last_line = node.lineno + + def signature(self, node, frame, extra_kwargs=None): + """Writes a function call to the stream for the current node. + A leading comma is added automatically. 
The extra keyword + arguments may not include python keywords otherwise a syntax + error could occour. The extra keyword arguments should be given + as python dict. + """ + # if any of the given keyword arguments is a python keyword + # we have to make sure that no invalid call is created. + kwarg_workaround = False + for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()): + if is_python_keyword(kwarg): + kwarg_workaround = True + break + + for arg in node.args: + self.write(', ') + self.visit(arg, frame) + + if not kwarg_workaround: + for kwarg in node.kwargs: + self.write(', ') + self.visit(kwarg, frame) + if extra_kwargs is not None: + for key, value in iteritems(extra_kwargs): + self.write(', %s=%s' % (key, value)) + if node.dyn_args: + self.write(', *') + self.visit(node.dyn_args, frame) + + if kwarg_workaround: + if node.dyn_kwargs is not None: + self.write(', **dict({') + else: + self.write(', **{') + for kwarg in node.kwargs: + self.write('%r: ' % kwarg.key) + self.visit(kwarg.value, frame) + self.write(', ') + if extra_kwargs is not None: + for key, value in iteritems(extra_kwargs): + self.write('%r: %s, ' % (key, value)) + if node.dyn_kwargs is not None: + self.write('}, **') + self.visit(node.dyn_kwargs, frame) + self.write(')') + else: + self.write('}') + + elif node.dyn_kwargs is not None: + self.write(', **') + self.visit(node.dyn_kwargs, frame) + + def pull_dependencies(self, nodes): + """Pull all the dependencies.""" + visitor = DependencyFinderVisitor() + for node in nodes: + visitor.visit(node) + for dependency in 'filters', 'tests': + mapping = getattr(self, dependency) + for name in getattr(visitor, dependency): + if name not in mapping: + mapping[name] = self.temporary_identifier() + self.writeline('%s = environment.%s[%r]' % + (mapping[name], dependency, name)) + + def enter_frame(self, frame): + undefs = [] + for target, (action, param) in iteritems(frame.symbols.loads): + if action == VAR_LOAD_PARAMETER: + pass + elif action == VAR_LOAD_RESOLVE: + self.writeline('%s = %s(%r)' % + (target, self.get_resolve_func(), param)) + elif action == VAR_LOAD_ALIAS: + self.writeline('%s = %s' % (target, param)) + elif action == VAR_LOAD_UNDEFINED: + undefs.append(target) + else: + raise NotImplementedError('unknown load instruction') + if undefs: + self.writeline('%s = missing' % ' = '.join(undefs)) + + def leave_frame(self, frame, with_python_scope=False): + if not with_python_scope: + undefs = [] + for target, _ in iteritems(frame.symbols.loads): + undefs.append(target) + if undefs: + self.writeline('%s = missing' % ' = '.join(undefs)) + + def func(self, name): + if self.environment.is_async: + return 'async def %s' % name + return 'def %s' % name + + def macro_body(self, node, frame): + """Dump the function def of a macro or call block.""" + frame = frame.inner() + frame.symbols.analyze_node(node) + macro_ref = MacroRef(node) + + explicit_caller = None + skip_special_params = set() + args = [] + for idx, arg in enumerate(node.args): + if arg.name == 'caller': + explicit_caller = idx + if arg.name in ('kwargs', 'varargs'): + skip_special_params.add(arg.name) + args.append(frame.symbols.ref(arg.name)) + + undeclared = find_undeclared(node.body, ('caller', 'kwargs', 'varargs')) + + if 'caller' in undeclared: + # In older Jinja2 versions there was a bug that allowed caller + # to retain the special behavior even if it was mentioned in + # the argument list. However thankfully this was only really + # working if it was the last argument. 
So we are explicitly + # checking this now and error out if it is anywhere else in + # the argument list. + if explicit_caller is not None: + try: + node.defaults[explicit_caller - len(node.args)] + except IndexError: + self.fail('When defining macros or call blocks the ' + 'special "caller" argument must be omitted ' + 'or be given a default.', node.lineno) + else: + args.append(frame.symbols.declare_parameter('caller')) + macro_ref.accesses_caller = True + if 'kwargs' in undeclared and not 'kwargs' in skip_special_params: + args.append(frame.symbols.declare_parameter('kwargs')) + macro_ref.accesses_kwargs = True + if 'varargs' in undeclared and not 'varargs' in skip_special_params: + args.append(frame.symbols.declare_parameter('varargs')) + macro_ref.accesses_varargs = True + + # macros are delayed, they never require output checks + frame.require_output_check = False + frame.symbols.analyze_node(node) + self.writeline('%s(%s):' % (self.func('macro'), ', '.join(args)), node) + self.indent() + + self.buffer(frame) + self.enter_frame(frame) + + self.push_parameter_definitions(frame) + for idx, arg in enumerate(node.args): + ref = frame.symbols.ref(arg.name) + self.writeline('if %s is missing:' % ref) + self.indent() + try: + default = node.defaults[idx - len(node.args)] + except IndexError: + self.writeline('%s = undefined(%r, name=%r)' % ( + ref, + 'parameter %r was not provided' % arg.name, + arg.name)) + else: + self.writeline('%s = ' % ref) + self.visit(default, frame) + self.mark_parameter_stored(ref) + self.outdent() + self.pop_parameter_definitions() + + self.blockvisit(node.body, frame) + self.return_buffer_contents(frame, force_unescaped=True) + self.leave_frame(frame, with_python_scope=True) + self.outdent() + + return frame, macro_ref + + def macro_def(self, macro_ref, frame): + """Dump the macro definition for the def created by macro_body.""" + arg_tuple = ', '.join(repr(x.name) for x in macro_ref.node.args) + name = getattr(macro_ref.node, 'name', None) + if len(macro_ref.node.args) == 1: + arg_tuple += ',' + self.write('Macro(environment, macro, %r, (%s), %r, %r, %r, ' + 'context.eval_ctx.autoescape)' % + (name, arg_tuple, macro_ref.accesses_kwargs, + macro_ref.accesses_varargs, macro_ref.accesses_caller)) + + def position(self, node): + """Return a human readable position for the node.""" + rv = 'line %d' % node.lineno + if self.name is not None: + rv += ' in ' + repr(self.name) + return rv + + def dump_local_context(self, frame): + return '{%s}' % ', '.join( + '%r: %s' % (name, target) for name, target + in iteritems(frame.symbols.dump_stores())) + + def write_commons(self): + """Writes a common preamble that is used by root and block functions. + Primarily this sets up common local helpers and enforces a generator + through a dead branch. + """ + self.writeline('resolve = context.resolve_or_missing') + self.writeline('undefined = environment.undefined') + self.writeline('if 0: yield None') + + def push_parameter_definitions(self, frame): + """Pushes all parameter targets from the given frame into a local + stack that permits tracking of yet to be assigned parameters. In + particular this enables the optimization from `visit_Name` to skip + undefined expressions for parameters in macros as macros can reference + otherwise unbound parameters. 
+ """ + self._param_def_block.append(frame.symbols.dump_param_targets()) + + def pop_parameter_definitions(self): + """Pops the current parameter definitions set.""" + self._param_def_block.pop() + + def mark_parameter_stored(self, target): + """Marks a parameter in the current parameter definitions as stored. + This will skip the enforced undefined checks. + """ + if self._param_def_block: + self._param_def_block[-1].discard(target) + + def push_context_reference(self, target): + self._context_reference_stack.append(target) + + def pop_context_reference(self): + self._context_reference_stack.pop() + + def get_context_ref(self): + return self._context_reference_stack[-1] + + def get_resolve_func(self): + target = self._context_reference_stack[-1] + if target == 'context': + return 'resolve' + return '%s.resolve' % target + + def derive_context(self, frame): + return '%s.derived(%s)' % ( + self.get_context_ref(), + self.dump_local_context(frame), + ) + + def parameter_is_undeclared(self, target): + """Checks if a given target is an undeclared parameter.""" + if not self._param_def_block: + return False + return target in self._param_def_block[-1] + + def push_assign_tracking(self): + """Pushes a new layer for assignment tracking.""" + self._assign_stack.append(set()) + + def pop_assign_tracking(self, frame): + """Pops the topmost level for assignment tracking and updates the + context variables if necessary. + """ + vars = self._assign_stack.pop() + if not frame.toplevel or not vars: + return + public_names = [x for x in vars if x[:1] != '_'] + if len(vars) == 1: + name = next(iter(vars)) + ref = frame.symbols.ref(name) + self.writeline('context.vars[%r] = %s' % (name, ref)) + else: + self.writeline('context.vars.update({') + for idx, name in enumerate(vars): + if idx: + self.write(', ') + ref = frame.symbols.ref(name) + self.write('%r: %s' % (name, ref)) + self.write('})') + if public_names: + if len(public_names) == 1: + self.writeline('context.exported_vars.add(%r)' % + public_names[0]) + else: + self.writeline('context.exported_vars.update((%s))' % + ', '.join(imap(repr, public_names))) + + # -- Statement Visitors + + def visit_Template(self, node, frame=None): + assert frame is None, 'no root frame allowed' + eval_ctx = EvalContext(self.environment, self.name) + + from jinja2.runtime import __all__ as exported + self.writeline('from __future__ import %s' % ', '.join(code_features)) + self.writeline('from jinja2.runtime import ' + ', '.join(exported)) + + if self.environment.is_async: + self.writeline('from jinja2.asyncsupport import auto_await, ' + 'auto_aiter, make_async_loop_context') + + # if we want a deferred initialization we cannot move the + # environment into a local name + envenv = not self.defer_init and ', environment=environment' or '' + + # do we have an extends tag at all? If not, we can save some + # overhead by just not processing any inheritance code. + have_extends = node.find(nodes.Extends) is not None + + # find all blocks + for block in node.find_all(nodes.Block): + if block.name in self.blocks: + self.fail('block %r defined twice' % block.name, block.lineno) + self.blocks[block.name] = block + + # find all imports and import them + for import_ in node.find_all(nodes.ImportedName): + if import_.importname not in self.import_aliases: + imp = import_.importname + self.import_aliases[imp] = alias = self.temporary_identifier() + if '.' 
in imp: + module, obj = imp.rsplit('.', 1) + self.writeline('from %s import %s as %s' % + (module, obj, alias)) + else: + self.writeline('import %s as %s' % (imp, alias)) + + # add the load name + self.writeline('name = %r' % self.name) + + # generate the root render function. + self.writeline('%s(context, missing=missing%s):' % + (self.func('root'), envenv), extra=1) + self.indent() + self.write_commons() + + # process the root + frame = Frame(eval_ctx) + if 'self' in find_undeclared(node.body, ('self',)): + ref = frame.symbols.declare_parameter('self') + self.writeline('%s = TemplateReference(context)' % ref) + frame.symbols.analyze_node(node) + frame.toplevel = frame.rootlevel = True + frame.require_output_check = have_extends and not self.has_known_extends + if have_extends: + self.writeline('parent_template = None') + self.enter_frame(frame) + self.pull_dependencies(node.body) + self.blockvisit(node.body, frame) + self.leave_frame(frame, with_python_scope=True) + self.outdent() + + # make sure that the parent root is called. + if have_extends: + if not self.has_known_extends: + self.indent() + self.writeline('if parent_template is not None:') + self.indent() + if supports_yield_from and not self.environment.is_async: + self.writeline('yield from parent_template.' + 'root_render_func(context)') + else: + self.writeline('%sfor event in parent_template.' + 'root_render_func(context):' % + (self.environment.is_async and 'async ' or '')) + self.indent() + self.writeline('yield event') + self.outdent() + self.outdent(1 + (not self.has_known_extends)) + + # at this point we now have the blocks collected and can visit them too. + for name, block in iteritems(self.blocks): + self.writeline('%s(context, missing=missing%s):' % + (self.func('block_' + name), envenv), + block, 1) + self.indent() + self.write_commons() + # It's important that we do not make this frame a child of the + # toplevel template. This would cause a variety of + # interesting issues with identifier tracking. 
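# A hypothetical usage sketch (not part of the vendored Jinja2 sources added
# by this commit): every {% block %} collected above is compiled into its own
# block_<name> generator function, and {% extends %} resolves them through
# context.blocks, which is what makes super() work. Template names here are
# made up.
from jinja2 import Environment, DictLoader

env = Environment(loader=DictLoader({
    "base.html": "<title>{% block title %}Base{% endblock %}</title>",
    "child.html": '{% extends "base.html" %}'
                  '{% block title %}Child | {{ super() }}{% endblock %}',
}))
print(env.get_template("child.html").render())
# -> <title>Child | Base</title>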
+ block_frame = Frame(eval_ctx) + undeclared = find_undeclared(block.body, ('self', 'super')) + if 'self' in undeclared: + ref = block_frame.symbols.declare_parameter('self') + self.writeline('%s = TemplateReference(context)' % ref) + if 'super' in undeclared: + ref = block_frame.symbols.declare_parameter('super') + self.writeline('%s = context.super(%r, ' + 'block_%s)' % (ref, name, name)) + block_frame.symbols.analyze_node(block) + block_frame.block = name + self.enter_frame(block_frame) + self.pull_dependencies(block.body) + self.blockvisit(block.body, block_frame) + self.leave_frame(block_frame, with_python_scope=True) + self.outdent() + + self.writeline('blocks = {%s}' % ', '.join('%r: block_%s' % (x, x) + for x in self.blocks), + extra=1) + + # add a function that returns the debug info + self.writeline('debug_info = %r' % '&'.join('%s=%s' % x for x + in self.debug_info)) + + def visit_Block(self, node, frame): + """Call a block and register it for the template.""" + level = 0 + if frame.toplevel: + # if we know that we are a child template, there is no need to + # check if we are one + if self.has_known_extends: + return + if self.extends_so_far > 0: + self.writeline('if parent_template is None:') + self.indent() + level += 1 + + if node.scoped: + context = self.derive_context(frame) + else: + context = self.get_context_ref() + + if supports_yield_from and not self.environment.is_async and \ + frame.buffer is None: + self.writeline('yield from context.blocks[%r][0](%s)' % ( + node.name, context), node) + else: + loop = self.environment.is_async and 'async for' or 'for' + self.writeline('%s event in context.blocks[%r][0](%s):' % ( + loop, node.name, context), node) + self.indent() + self.simple_write('event', frame) + self.outdent() + + self.outdent(level) + + def visit_Extends(self, node, frame): + """Calls the extender.""" + if not frame.toplevel: + self.fail('cannot use extend from a non top-level scope', + node.lineno) + + # if the number of extends statements in general is zero so + # far, we don't have to add a check if something extended + # the template before this one. + if self.extends_so_far > 0: + + # if we have a known extends we just add a template runtime + # error into the generated code. We could catch that at compile + # time too, but i welcome it not to confuse users by throwing the + # same error at different times just "because we can". + if not self.has_known_extends: + self.writeline('if parent_template is not None:') + self.indent() + self.writeline('raise TemplateRuntimeError(%r)' % + 'extended multiple times') + + # if we have a known extends already we don't need that code here + # as we know that the template execution will end here. + if self.has_known_extends: + raise CompilerExit() + else: + self.outdent() + + self.writeline('parent_template = environment.get_template(', node) + self.visit(node.template, frame) + self.write(', %r)' % self.name) + self.writeline('for name, parent_block in parent_template.' + 'blocks.%s():' % dict_item_iter) + self.indent() + self.writeline('context.blocks.setdefault(name, []).' 
+ 'append(parent_block)') + self.outdent() + + # if this extends statement was in the root level we can take + # advantage of that information and simplify the generated code + # in the top level from this point onwards + if frame.rootlevel: + self.has_known_extends = True + + # and now we have one more + self.extends_so_far += 1 + + def visit_Include(self, node, frame): + """Handles includes.""" + if node.ignore_missing: + self.writeline('try:') + self.indent() + + func_name = 'get_or_select_template' + if isinstance(node.template, nodes.Const): + if isinstance(node.template.value, string_types): + func_name = 'get_template' + elif isinstance(node.template.value, (tuple, list)): + func_name = 'select_template' + elif isinstance(node.template, (nodes.Tuple, nodes.List)): + func_name = 'select_template' + + self.writeline('template = environment.%s(' % func_name, node) + self.visit(node.template, frame) + self.write(', %r)' % self.name) + if node.ignore_missing: + self.outdent() + self.writeline('except TemplateNotFound:') + self.indent() + self.writeline('pass') + self.outdent() + self.writeline('else:') + self.indent() + + skip_event_yield = False + if node.with_context: + loop = self.environment.is_async and 'async for' or 'for' + self.writeline('%s event in template.root_render_func(' + 'template.new_context(context.get_all(), True, ' + '%s)):' % (loop, self.dump_local_context(frame))) + elif self.environment.is_async: + self.writeline('for event in (await ' + 'template._get_default_module_async())' + '._body_stream:') + else: + if supports_yield_from: + self.writeline('yield from template._get_default_module()' + '._body_stream') + skip_event_yield = True + else: + self.writeline('for event in template._get_default_module()' + '._body_stream:') + + if not skip_event_yield: + self.indent() + self.simple_write('event', frame) + self.outdent() + + if node.ignore_missing: + self.outdent() + + def visit_Import(self, node, frame): + """Visit regular imports.""" + self.writeline('%s = ' % frame.symbols.ref(node.target), node) + if frame.toplevel: + self.write('context.vars[%r] = ' % node.target) + if self.environment.is_async: + self.write('await ') + self.write('environment.get_template(') + self.visit(node.template, frame) + self.write(', %r).' % self.name) + if node.with_context: + self.write('make_module%s(context.get_all(), True, %s)' + % (self.environment.is_async and '_async' or '', + self.dump_local_context(frame))) + elif self.environment.is_async: + self.write('_get_default_module_async()') + else: + self.write('_get_default_module()') + if frame.toplevel and not node.target.startswith('_'): + self.writeline('context.exported_vars.discard(%r)' % node.target) + + def visit_FromImport(self, node, frame): + """Visit named imports.""" + self.newline(node) + self.write('included_template = %senvironment.get_template(' + % (self.environment.is_async and 'await ' or '')) + self.visit(node.template, frame) + self.write(', %r).' 
% self.name) + if node.with_context: + self.write('make_module%s(context.get_all(), True, %s)' + % (self.environment.is_async and '_async' or '', + self.dump_local_context(frame))) + elif self.environment.is_async: + self.write('_get_default_module_async()') + else: + self.write('_get_default_module()') + + var_names = [] + discarded_names = [] + for name in node.names: + if isinstance(name, tuple): + name, alias = name + else: + alias = name + self.writeline('%s = getattr(included_template, ' + '%r, missing)' % (frame.symbols.ref(alias), name)) + self.writeline('if %s is missing:' % frame.symbols.ref(alias)) + self.indent() + self.writeline('%s = undefined(%r %% ' + 'included_template.__name__, ' + 'name=%r)' % + (frame.symbols.ref(alias), + 'the template %%r (imported on %s) does ' + 'not export the requested name %s' % ( + self.position(node), + repr(name) + ), name)) + self.outdent() + if frame.toplevel: + var_names.append(alias) + if not alias.startswith('_'): + discarded_names.append(alias) + + if var_names: + if len(var_names) == 1: + name = var_names[0] + self.writeline('context.vars[%r] = %s' % + (name, frame.symbols.ref(name))) + else: + self.writeline('context.vars.update({%s})' % ', '.join( + '%r: %s' % (name, frame.symbols.ref(name)) for name in var_names + )) + if discarded_names: + if len(discarded_names) == 1: + self.writeline('context.exported_vars.discard(%r)' % + discarded_names[0]) + else: + self.writeline('context.exported_vars.difference_' + 'update((%s))' % ', '.join(imap(repr, discarded_names))) + + def visit_For(self, node, frame): + loop_frame = frame.inner() + test_frame = frame.inner() + else_frame = frame.inner() + + # try to figure out if we have an extended loop. An extended loop + # is necessary if the loop is in recursive mode if the special loop + # variable is accessed in the body. + extended_loop = node.recursive or 'loop' in \ + find_undeclared(node.iter_child_nodes( + only=('body',)), ('loop',)) + + loop_ref = None + if extended_loop: + loop_ref = loop_frame.symbols.declare_parameter('loop') + + loop_frame.symbols.analyze_node(node, for_branch='body') + if node.else_: + else_frame.symbols.analyze_node(node, for_branch='else') + + if node.test: + loop_filter_func = self.temporary_identifier() + test_frame.symbols.analyze_node(node, for_branch='test') + self.writeline('%s(fiter):' % self.func(loop_filter_func), node.test) + self.indent() + self.enter_frame(test_frame) + self.writeline(self.environment.is_async and 'async for ' or 'for ') + self.visit(node.target, loop_frame) + self.write(' in ') + self.write(self.environment.is_async and 'auto_aiter(fiter)' or 'fiter') + self.write(':') + self.indent() + self.writeline('if ', node.test) + self.visit(node.test, test_frame) + self.write(':') + self.indent() + self.writeline('yield ') + self.visit(node.target, loop_frame) + self.outdent(3) + self.leave_frame(test_frame, with_python_scope=True) + + # if we don't have an recursive loop we have to find the shadowed + # variables at that point. Because loops can be nested but the loop + # variable is a special one we have to enforce aliasing for it. 
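# A hypothetical sketch (not part of the vendored sources): the code below
# only wraps the iterable in a LoopContext when the special `loop` variable
# is actually referenced (or the loop is recursive), and a `for ... if ...`
# test is compiled into the separate filter generator emitted above.
from jinja2 import Environment

env = Environment()
tmpl = env.from_string(
    "{% for user in users if user.active %}"
    "{{ loop.index }}: {{ user.name }}\n"
    "{% endfor %}"
)
print(tmpl.render(users=[{"active": True, "name": "alice"},
                         {"active": False, "name": "bob"}]))
# -> 1: alice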
+ if node.recursive: + self.writeline('%s(reciter, loop_render_func, depth=0):' % + self.func('loop'), node) + self.indent() + self.buffer(loop_frame) + + # Use the same buffer for the else frame + else_frame.buffer = loop_frame.buffer + + # make sure the loop variable is a special one and raise a template + # assertion error if a loop tries to write to loop + if extended_loop: + self.writeline('%s = missing' % loop_ref) + + for name in node.find_all(nodes.Name): + if name.ctx == 'store' and name.name == 'loop': + self.fail('Can\'t assign to special loop variable ' + 'in for-loop target', name.lineno) + + if node.else_: + iteration_indicator = self.temporary_identifier() + self.writeline('%s = 1' % iteration_indicator) + + self.writeline(self.environment.is_async and 'async for ' or 'for ', node) + self.visit(node.target, loop_frame) + if extended_loop: + if self.environment.is_async: + self.write(', %s in await make_async_loop_context(' % loop_ref) + else: + self.write(', %s in LoopContext(' % loop_ref) + else: + self.write(' in ') + + if node.test: + self.write('%s(' % loop_filter_func) + if node.recursive: + self.write('reciter') + else: + if self.environment.is_async and not extended_loop: + self.write('auto_aiter(') + self.visit(node.iter, frame) + if self.environment.is_async and not extended_loop: + self.write(')') + if node.test: + self.write(')') + + if node.recursive: + self.write(', undefined, loop_render_func, depth):') + else: + self.write(extended_loop and ', undefined):' or ':') + + self.indent() + self.enter_frame(loop_frame) + + self.blockvisit(node.body, loop_frame) + if node.else_: + self.writeline('%s = 0' % iteration_indicator) + self.outdent() + self.leave_frame(loop_frame, with_python_scope=node.recursive + and not node.else_) + + if node.else_: + self.writeline('if %s:' % iteration_indicator) + self.indent() + self.enter_frame(else_frame) + self.blockvisit(node.else_, else_frame) + self.leave_frame(else_frame) + self.outdent() + + # if the node was recursive we have to return the buffer contents + # and start the iteration code + if node.recursive: + self.return_buffer_contents(loop_frame) + self.outdent() + self.start_write(frame, node) + if self.environment.is_async: + self.write('await ') + self.write('loop(') + if self.environment.is_async: + self.write('auto_aiter(') + self.visit(node.iter, frame) + if self.environment.is_async: + self.write(')') + self.write(', loop)') + self.end_write(frame) + + def visit_If(self, node, frame): + if_frame = frame.soft() + self.writeline('if ', node) + self.visit(node.test, if_frame) + self.write(':') + self.indent() + self.blockvisit(node.body, if_frame) + self.outdent() + for elif_ in node.elif_: + self.writeline('elif ', elif_) + self.visit(elif_.test, if_frame) + self.write(':') + self.indent() + self.blockvisit(elif_.body, if_frame) + self.outdent() + if node.else_: + self.writeline('else:') + self.indent() + self.blockvisit(node.else_, if_frame) + self.outdent() + + def visit_Macro(self, node, frame): + macro_frame, macro_ref = self.macro_body(node, frame) + self.newline() + if frame.toplevel: + if not node.name.startswith('_'): + self.write('context.exported_vars.add(%r)' % node.name) + ref = frame.symbols.ref(node.name) + self.writeline('context.vars[%r] = ' % node.name) + self.write('%s = ' % frame.symbols.ref(node.name)) + self.macro_def(macro_ref, macro_frame) + + def visit_CallBlock(self, node, frame): + call_frame, macro_ref = self.macro_body(node, frame) + self.writeline('caller = ') + self.macro_def(macro_ref, 
call_frame) + self.start_write(frame, node) + self.visit_Call(node.call, frame, forward_caller=True) + self.end_write(frame) + + def visit_FilterBlock(self, node, frame): + filter_frame = frame.inner() + filter_frame.symbols.analyze_node(node) + self.enter_frame(filter_frame) + self.buffer(filter_frame) + self.blockvisit(node.body, filter_frame) + self.start_write(frame, node) + self.visit_Filter(node.filter, filter_frame) + self.end_write(frame) + self.leave_frame(filter_frame) + + def visit_With(self, node, frame): + with_frame = frame.inner() + with_frame.symbols.analyze_node(node) + self.enter_frame(with_frame) + for idx, (target, expr) in enumerate(izip(node.targets, node.values)): + self.newline() + self.visit(target, with_frame) + self.write(' = ') + self.visit(expr, frame) + self.blockvisit(node.body, with_frame) + self.leave_frame(with_frame) + + def visit_ExprStmt(self, node, frame): + self.newline(node) + self.visit(node.node, frame) + + def visit_Output(self, node, frame): + # if we have a known extends statement, we don't output anything + # if we are in a require_output_check section + if self.has_known_extends and frame.require_output_check: + return + + allow_constant_finalize = True + if self.environment.finalize: + func = self.environment.finalize + if getattr(func, 'contextfunction', False) or \ + getattr(func, 'evalcontextfunction', False): + allow_constant_finalize = False + elif getattr(func, 'environmentfunction', False): + finalize = lambda x: text_type( + self.environment.finalize(self.environment, x)) + else: + finalize = lambda x: text_type(self.environment.finalize(x)) + else: + finalize = text_type + + # if we are inside a frame that requires output checking, we do so + outdent_later = False + if frame.require_output_check: + self.writeline('if parent_template is None:') + self.indent() + outdent_later = True + + # try to evaluate as many chunks as possible into a static + # string at compile time. + body = [] + for child in node.nodes: + try: + if not allow_constant_finalize: + raise nodes.Impossible() + const = child.as_const(frame.eval_ctx) + except nodes.Impossible: + body.append(child) + continue + # the frame can't be volatile here, becaus otherwise the + # as_const() function would raise an Impossible exception + # at that point. 
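# A hypothetical sketch (not part of the vendored sources): visit_Output
# folds chunks that are constant at compile time into a single string and
# routes runtime values through the optional `finalize` callable, e.g. one
# that renders None as an empty string instead of the text "None".
from jinja2 import Environment

env = Environment(finalize=lambda value: "" if value is None else value)
print(env.from_string("Hello {{ user }}!").render(user=None))
# -> Hello !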
+ try: + if frame.eval_ctx.autoescape: + if hasattr(const, '__html__'): + const = const.__html__() + else: + const = escape(const) + const = finalize(const) + except Exception: + # if something goes wrong here we evaluate the node + # at runtime for easier debugging + body.append(child) + continue + if body and isinstance(body[-1], list): + body[-1].append(const) + else: + body.append([const]) + + # if we have less than 3 nodes or a buffer we yield or extend/append + if len(body) < 3 or frame.buffer is not None: + if frame.buffer is not None: + # for one item we append, for more we extend + if len(body) == 1: + self.writeline('%s.append(' % frame.buffer) + else: + self.writeline('%s.extend((' % frame.buffer) + self.indent() + for item in body: + if isinstance(item, list): + val = repr(concat(item)) + if frame.buffer is None: + self.writeline('yield ' + val) + else: + self.writeline(val + ',') + else: + if frame.buffer is None: + self.writeline('yield ', item) + else: + self.newline(item) + close = 1 + if frame.eval_ctx.volatile: + self.write('(escape if context.eval_ctx.autoescape' + ' else to_string)(') + elif frame.eval_ctx.autoescape: + self.write('escape(') + else: + self.write('to_string(') + if self.environment.finalize is not None: + self.write('environment.finalize(') + if getattr(self.environment.finalize, + "contextfunction", False): + self.write('context, ') + close += 1 + self.visit(item, frame) + self.write(')' * close) + if frame.buffer is not None: + self.write(',') + if frame.buffer is not None: + # close the open parentheses + self.outdent() + self.writeline(len(body) == 1 and ')' or '))') + + # otherwise we create a format string as this is faster in that case + else: + format = [] + arguments = [] + for item in body: + if isinstance(item, list): + format.append(concat(item).replace('%', '%%')) + else: + format.append('%s') + arguments.append(item) + self.writeline('yield ') + self.write(repr(concat(format)) + ' % (') + self.indent() + for argument in arguments: + self.newline(argument) + close = 0 + if frame.eval_ctx.volatile: + self.write('(escape if context.eval_ctx.autoescape else' + ' to_string)(') + close += 1 + elif frame.eval_ctx.autoescape: + self.write('escape(') + close += 1 + if self.environment.finalize is not None: + self.write('environment.finalize(') + if getattr(self.environment.finalize, + 'contextfunction', False): + self.write('context, ') + elif getattr(self.environment.finalize, + 'evalcontextfunction', False): + self.write('context.eval_ctx, ') + elif getattr(self.environment.finalize, + 'environmentfunction', False): + self.write('environment, ') + close += 1 + self.visit(argument, frame) + self.write(')' * close + ', ') + self.outdent() + self.writeline(')') + + if outdent_later: + self.outdent() + + def visit_Assign(self, node, frame): + self.push_assign_tracking() + self.newline(node) + self.visit(node.target, frame) + self.write(' = ') + self.visit(node.node, frame) + self.pop_assign_tracking(frame) + + def visit_AssignBlock(self, node, frame): + self.push_assign_tracking() + block_frame = frame.inner() + # This is a special case. Since a set block always captures we + # will disable output checks. This way one can use set blocks + # toplevel even in extended templates. 
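# A hypothetical sketch (not part of the vendored sources): visit_AssignBlock
# captures the body of a block assignment into a buffer and stores it in the
# target, optionally piping it through a filter first, as the node.filter
# branch below suggests ({% set x | f %} ... {% endset %}).
from jinja2 import Environment

env = Environment()
tmpl = env.from_string(
    "{% set greeting | upper %}hello {{ name }}{% endset %}{{ greeting }}"
)
print(tmpl.render(name="world"))
# -> HELLO WORLD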
+ block_frame.require_output_check = False + block_frame.symbols.analyze_node(node) + self.enter_frame(block_frame) + self.buffer(block_frame) + self.blockvisit(node.body, block_frame) + self.newline(node) + self.visit(node.target, frame) + self.write(' = (Markup if context.eval_ctx.autoescape ' + 'else identity)(') + if node.filter is not None: + self.visit_Filter(node.filter, block_frame) + else: + self.write('concat(%s)' % block_frame.buffer) + self.write(')') + self.pop_assign_tracking(frame) + self.leave_frame(block_frame) + + # -- Expression Visitors + + def visit_Name(self, node, frame): + if node.ctx == 'store' and frame.toplevel: + if self._assign_stack: + self._assign_stack[-1].add(node.name) + ref = frame.symbols.ref(node.name) + + # If we are looking up a variable we might have to deal with the + # case where it's undefined. We can skip that case if the load + # instruction indicates a parameter which are always defined. + if node.ctx == 'load': + load = frame.symbols.find_load(ref) + if not (load is not None and load[0] == VAR_LOAD_PARAMETER and \ + not self.parameter_is_undeclared(ref)): + self.write('(undefined(name=%r) if %s is missing else %s)' % + (node.name, ref, ref)) + return + + self.write(ref) + + def visit_NSRef(self, node, frame): + # NSRefs can only be used to store values; since they use the normal + # `foo.bar` notation they will be parsed as a normal attribute access + # when used anywhere but in a `set` context + ref = frame.symbols.ref(node.name) + self.writeline('if not isinstance(%s, Namespace):' % ref) + self.indent() + self.writeline('raise TemplateRuntimeError(%r)' % + 'cannot assign attribute on non-namespace object') + self.outdent() + self.writeline('%s[%r]' % (ref, node.attr)) + + def visit_Const(self, node, frame): + val = node.as_const(frame.eval_ctx) + if isinstance(val, float): + self.write(str(val)) + else: + self.write(repr(val)) + + def visit_TemplateData(self, node, frame): + try: + self.write(repr(node.as_const(frame.eval_ctx))) + except nodes.Impossible: + self.write('(Markup if context.eval_ctx.autoescape else identity)(%r)' + % node.data) + + def visit_Tuple(self, node, frame): + self.write('(') + idx = -1 + for idx, item in enumerate(node.items): + if idx: + self.write(', ') + self.visit(item, frame) + self.write(idx == 0 and ',)' or ')') + + def visit_List(self, node, frame): + self.write('[') + for idx, item in enumerate(node.items): + if idx: + self.write(', ') + self.visit(item, frame) + self.write(']') + + def visit_Dict(self, node, frame): + self.write('{') + for idx, item in enumerate(node.items): + if idx: + self.write(', ') + self.visit(item.key, frame) + self.write(': ') + self.visit(item.value, frame) + self.write('}') + + def binop(operator, interceptable=True): + @optimizeconst + def visitor(self, node, frame): + if self.environment.sandboxed and \ + operator in self.environment.intercepted_binops: + self.write('environment.call_binop(context, %r, ' % operator) + self.visit(node.left, frame) + self.write(', ') + self.visit(node.right, frame) + else: + self.write('(') + self.visit(node.left, frame) + self.write(' %s ' % operator) + self.visit(node.right, frame) + self.write(')') + return visitor + + def uaop(operator, interceptable=True): + @optimizeconst + def visitor(self, node, frame): + if self.environment.sandboxed and \ + operator in self.environment.intercepted_unops: + self.write('environment.call_unop(context, %r, ' % operator) + self.visit(node.node, frame) + else: + self.write('(' + operator) + 
self.visit(node.node, frame) + self.write(')') + return visitor + + visit_Add = binop('+') + visit_Sub = binop('-') + visit_Mul = binop('*') + visit_Div = binop('/') + visit_FloorDiv = binop('//') + visit_Pow = binop('**') + visit_Mod = binop('%') + visit_And = binop('and', interceptable=False) + visit_Or = binop('or', interceptable=False) + visit_Pos = uaop('+') + visit_Neg = uaop('-') + visit_Not = uaop('not ', interceptable=False) + del binop, uaop + + @optimizeconst + def visit_Concat(self, node, frame): + if frame.eval_ctx.volatile: + func_name = '(context.eval_ctx.volatile and' \ + ' markup_join or unicode_join)' + elif frame.eval_ctx.autoescape: + func_name = 'markup_join' + else: + func_name = 'unicode_join' + self.write('%s((' % func_name) + for arg in node.nodes: + self.visit(arg, frame) + self.write(', ') + self.write('))') + + @optimizeconst + def visit_Compare(self, node, frame): + self.visit(node.expr, frame) + for op in node.ops: + self.visit(op, frame) + + def visit_Operand(self, node, frame): + self.write(' %s ' % operators[node.op]) + self.visit(node.expr, frame) + + @optimizeconst + def visit_Getattr(self, node, frame): + self.write('environment.getattr(') + self.visit(node.node, frame) + self.write(', %r)' % node.attr) + + @optimizeconst + def visit_Getitem(self, node, frame): + # slices bypass the environment getitem method. + if isinstance(node.arg, nodes.Slice): + self.visit(node.node, frame) + self.write('[') + self.visit(node.arg, frame) + self.write(']') + else: + self.write('environment.getitem(') + self.visit(node.node, frame) + self.write(', ') + self.visit(node.arg, frame) + self.write(')') + + def visit_Slice(self, node, frame): + if node.start is not None: + self.visit(node.start, frame) + self.write(':') + if node.stop is not None: + self.visit(node.stop, frame) + if node.step is not None: + self.write(':') + self.visit(node.step, frame) + + @optimizeconst + def visit_Filter(self, node, frame): + if self.environment.is_async: + self.write('await auto_await(') + self.write(self.filters[node.name] + '(') + func = self.environment.filters.get(node.name) + if func is None: + self.fail('no filter named %r' % node.name, node.lineno) + if getattr(func, 'contextfilter', False): + self.write('context, ') + elif getattr(func, 'evalcontextfilter', False): + self.write('context.eval_ctx, ') + elif getattr(func, 'environmentfilter', False): + self.write('environment, ') + + # if the filter node is None we are inside a filter block + # and want to write to the current buffer + if node.node is not None: + self.visit(node.node, frame) + elif frame.eval_ctx.volatile: + self.write('(context.eval_ctx.autoescape and' + ' Markup(concat(%s)) or concat(%s))' % + (frame.buffer, frame.buffer)) + elif frame.eval_ctx.autoescape: + self.write('Markup(concat(%s))' % frame.buffer) + else: + self.write('concat(%s)' % frame.buffer) + self.signature(node, frame) + self.write(')') + if self.environment.is_async: + self.write(')') + + @optimizeconst + def visit_Test(self, node, frame): + self.write(self.tests[node.name] + '(') + if node.name not in self.environment.tests: + self.fail('no test named %r' % node.name, node.lineno) + self.visit(node.node, frame) + self.signature(node, frame) + self.write(')') + + @optimizeconst + def visit_CondExpr(self, node, frame): + def write_expr2(): + if node.expr2 is not None: + return self.visit(node.expr2, frame) + self.write('undefined(%r)' % ('the inline if-' + 'expression on %s evaluated to false and ' + 'no else section was defined.' 
% self.position(node))) + + self.write('(') + self.visit(node.expr1, frame) + self.write(' if ') + self.visit(node.test, frame) + self.write(' else ') + write_expr2() + self.write(')') + + @optimizeconst + def visit_Call(self, node, frame, forward_caller=False): + if self.environment.is_async: + self.write('await auto_await(') + if self.environment.sandboxed: + self.write('environment.call(context, ') + else: + self.write('context.call(') + self.visit(node.node, frame) + extra_kwargs = forward_caller and {'caller': 'caller'} or None + self.signature(node, frame, extra_kwargs) + self.write(')') + if self.environment.is_async: + self.write(')') + + def visit_Keyword(self, node, frame): + self.write(node.key + '=') + self.visit(node.value, frame) + + # -- Unused nodes for extensions + + def visit_MarkSafe(self, node, frame): + self.write('Markup(') + self.visit(node.expr, frame) + self.write(')') + + def visit_MarkSafeIfAutoescape(self, node, frame): + self.write('(context.eval_ctx.autoescape and Markup or identity)(') + self.visit(node.expr, frame) + self.write(')') + + def visit_EnvironmentAttribute(self, node, frame): + self.write('environment.' + node.name) + + def visit_ExtensionAttribute(self, node, frame): + self.write('environment.extensions[%r].%s' % (node.identifier, node.name)) + + def visit_ImportedName(self, node, frame): + self.write(self.import_aliases[node.importname]) + + def visit_InternalName(self, node, frame): + self.write(node.name) + + def visit_ContextReference(self, node, frame): + self.write('context') + + def visit_Continue(self, node, frame): + self.writeline('continue', node) + + def visit_Break(self, node, frame): + self.writeline('break', node) + + def visit_Scope(self, node, frame): + scope_frame = frame.inner() + scope_frame.symbols.analyze_node(node) + self.enter_frame(scope_frame) + self.blockvisit(node.body, scope_frame) + self.leave_frame(scope_frame) + + def visit_OverlayScope(self, node, frame): + ctx = self.temporary_identifier() + self.writeline('%s = %s' % (ctx, self.derive_context(frame))) + self.writeline('%s.vars = ' % ctx) + self.visit(node.context, frame) + self.push_context_reference(ctx) + + scope_frame = frame.inner(isolated=True) + scope_frame.symbols.analyze_node(node) + self.enter_frame(scope_frame) + self.blockvisit(node.body, scope_frame) + self.leave_frame(scope_frame) + self.pop_context_reference() + + def visit_EvalContextModifier(self, node, frame): + for keyword in node.options: + self.writeline('context.eval_ctx.%s = ' % keyword.key) + self.visit(keyword.value, frame) + try: + val = keyword.value.as_const(frame.eval_ctx) + except nodes.Impossible: + frame.eval_ctx.volatile = True + else: + setattr(frame.eval_ctx, keyword.key, val) + + def visit_ScopedEvalContextModifier(self, node, frame): + old_ctx_name = self.temporary_identifier() + saved_ctx = frame.eval_ctx.save() + self.writeline('%s = context.eval_ctx.save()' % old_ctx_name) + self.visit_EvalContextModifier(node, frame) + for child in node.body: + self.visit(child, frame) + frame.eval_ctx.revert(saved_ctx) + self.writeline('context.eval_ctx.revert(%s)' % old_ctx_name) diff --git a/py3/lib/python3.6/site-packages/jinja2/constants.py b/py3/lib/python3.6/site-packages/jinja2/constants.py new file mode 100644 index 0000000..11efd1e --- /dev/null +++ b/py3/lib/python3.6/site-packages/jinja2/constants.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +""" + jinja.constants + ~~~~~~~~~~~~~~~ + + Various constants. + + :copyright: (c) 2017 by the Jinja Team. 
+ :license: BSD, see LICENSE for more details. +""" + + +#: list of lorem ipsum words used by the lipsum() helper function +LOREM_IPSUM_WORDS = u'''\ +a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at +auctor augue bibendum blandit class commodo condimentum congue consectetuer +consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus +diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend +elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames +faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac +hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum +justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem +luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie +mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non +nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque +penatibus per pharetra phasellus placerat platea porta porttitor posuere +potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus +ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit +sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor +tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices +ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus +viverra volutpat vulputate''' diff --git a/py3/lib/python3.6/site-packages/jinja2/debug.py b/py3/lib/python3.6/site-packages/jinja2/debug.py new file mode 100644 index 0000000..b61139f --- /dev/null +++ b/py3/lib/python3.6/site-packages/jinja2/debug.py @@ -0,0 +1,372 @@ +# -*- coding: utf-8 -*- +""" + jinja2.debug + ~~~~~~~~~~~~ + + Implements the debug interface for Jinja. This module does some pretty + ugly stuff with the Python traceback system in order to achieve tracebacks + with correct line numbers, locals and contents. + + :copyright: (c) 2017 by the Jinja Team. + :license: BSD, see LICENSE for more details. +""" +import sys +import traceback +from types import TracebackType, CodeType +from jinja2.utils import missing, internal_code +from jinja2.exceptions import TemplateSyntaxError +from jinja2._compat import iteritems, reraise, PY2 + +# on pypy we can take advantage of transparent proxies +try: + from __pypy__ import tproxy +except ImportError: + tproxy = None + + +# how does the raise helper look like? +try: + exec("raise TypeError, 'foo'") +except SyntaxError: + raise_helper = 'raise __jinja_exception__[1]' +except TypeError: + raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]' + + +class TracebackFrameProxy(object): + """Proxies a traceback frame.""" + + def __init__(self, tb): + self.tb = tb + self._tb_next = None + + @property + def tb_next(self): + return self._tb_next + + def set_next(self, next): + if tb_set_next is not None: + try: + tb_set_next(self.tb, next and next.tb or None) + except Exception: + # this function can fail due to all the hackery it does + # on various python implementations. We just catch errors + # down and ignore them if necessary. 
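# A hypothetical sketch (not part of the vendored sources): the net effect of
# this module is that exceptions raised while rendering are re-raised with
# frames that point at the template source line rather than at the generated
# Python code.
from jinja2 import Environment
import traceback

env = Environment()
tmpl = env.from_string("line one\n{{ total // count }}")
try:
    tmpl.render(total=1, count=0)
except ZeroDivisionError:
    traceback.print_exc()
    # the printed traceback includes a frame such as:
    #   File "<template>", line 2, in top-level template code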
+ pass + self._tb_next = next + + @property + def is_jinja_frame(self): + return '__jinja_template__' in self.tb.tb_frame.f_globals + + def __getattr__(self, name): + return getattr(self.tb, name) + + +def make_frame_proxy(frame): + proxy = TracebackFrameProxy(frame) + if tproxy is None: + return proxy + def operation_handler(operation, *args, **kwargs): + if operation in ('__getattribute__', '__getattr__'): + return getattr(proxy, args[0]) + elif operation == '__setattr__': + proxy.__setattr__(*args, **kwargs) + else: + return getattr(proxy, operation)(*args, **kwargs) + return tproxy(TracebackType, operation_handler) + + +class ProcessedTraceback(object): + """Holds a Jinja preprocessed traceback for printing or reraising.""" + + def __init__(self, exc_type, exc_value, frames): + assert frames, 'no frames for this traceback?' + self.exc_type = exc_type + self.exc_value = exc_value + self.frames = frames + + # newly concatenate the frames (which are proxies) + prev_tb = None + for tb in self.frames: + if prev_tb is not None: + prev_tb.set_next(tb) + prev_tb = tb + prev_tb.set_next(None) + + def render_as_text(self, limit=None): + """Return a string with the traceback.""" + lines = traceback.format_exception(self.exc_type, self.exc_value, + self.frames[0], limit=limit) + return ''.join(lines).rstrip() + + def render_as_html(self, full=False): + """Return a unicode string with the traceback as rendered HTML.""" + from jinja2.debugrenderer import render_traceback + return u'%s\n\n' % ( + render_traceback(self, full=full), + self.render_as_text().decode('utf-8', 'replace') + ) + + @property + def is_template_syntax_error(self): + """`True` if this is a template syntax error.""" + return isinstance(self.exc_value, TemplateSyntaxError) + + @property + def exc_info(self): + """Exception info tuple with a proxy around the frame objects.""" + return self.exc_type, self.exc_value, self.frames[0] + + @property + def standard_exc_info(self): + """Standard python exc_info for re-raising""" + tb = self.frames[0] + # the frame will be an actual traceback (or transparent proxy) if + # we are on pypy or a python implementation with support for tproxy + if type(tb) is not TracebackType: + tb = tb.tb + return self.exc_type, self.exc_value, tb + + +def make_traceback(exc_info, source_hint=None): + """Creates a processed traceback object from the exc_info.""" + exc_type, exc_value, tb = exc_info + if isinstance(exc_value, TemplateSyntaxError): + exc_info = translate_syntax_error(exc_value, source_hint) + initial_skip = 0 + else: + initial_skip = 1 + return translate_exception(exc_info, initial_skip) + + +def translate_syntax_error(error, source=None): + """Rewrites a syntax error to please traceback systems.""" + error.source = source + error.translated = True + exc_info = (error.__class__, error, None) + filename = error.filename + if filename is None: + filename = '' + return fake_exc_info(exc_info, filename, error.lineno) + + +def translate_exception(exc_info, initial_skip=0): + """If passed an exc_info it will automatically rewrite the exceptions + all the way down to the correct line numbers and frames. + """ + tb = exc_info[2] + frames = [] + + # skip some internal frames if wanted + for x in range(initial_skip): + if tb is not None: + tb = tb.tb_next + initial_tb = tb + + while tb is not None: + # skip frames decorated with @internalcode. These are internal + # calls we can't avoid and that are useless in template debugging + # output. 
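# A hypothetical worked example (not part of the vendored sources): compiled
# templates keep template variables in Python locals named "l_<depth>_<name>",
# which the get_jinja_locals() helper defined further down in this file maps
# back to template-level names, preferring the deepest binding.
from jinja2.debug import get_jinja_locals

print(get_jinja_locals({"l_0_user": "outer", "l_1_user": "inner", "helper": 1}))
# -> {'user': 'inner'}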
+ if tb.tb_frame.f_code in internal_code: + tb = tb.tb_next + continue + + # save a reference to the next frame if we override the current + # one with a faked one. + next = tb.tb_next + + # fake template exceptions + template = tb.tb_frame.f_globals.get('__jinja_template__') + if template is not None: + lineno = template.get_corresponding_lineno(tb.tb_lineno) + tb = fake_exc_info(exc_info[:2] + (tb,), template.filename, + lineno)[2] + + frames.append(make_frame_proxy(tb)) + tb = next + + # if we don't have any exceptions in the frames left, we have to + # reraise it unchanged. + # XXX: can we backup here? when could this happen? + if not frames: + reraise(exc_info[0], exc_info[1], exc_info[2]) + + return ProcessedTraceback(exc_info[0], exc_info[1], frames) + + +def get_jinja_locals(real_locals): + ctx = real_locals.get('context') + if ctx: + locals = ctx.get_all().copy() + else: + locals = {} + + local_overrides = {} + + for name, value in iteritems(real_locals): + if not name.startswith('l_') or value is missing: + continue + try: + _, depth, name = name.split('_', 2) + depth = int(depth) + except ValueError: + continue + cur_depth = local_overrides.get(name, (-1,))[0] + if cur_depth < depth: + local_overrides[name] = (depth, value) + + for name, (_, value) in iteritems(local_overrides): + if value is missing: + locals.pop(name, None) + else: + locals[name] = value + + return locals + + +def fake_exc_info(exc_info, filename, lineno): + """Helper for `translate_exception`.""" + exc_type, exc_value, tb = exc_info + + # figure the real context out + if tb is not None: + locals = get_jinja_locals(tb.tb_frame.f_locals) + + # if there is a local called __jinja_exception__, we get + # rid of it to not break the debug functionality. + locals.pop('__jinja_exception__', None) + else: + locals = {} + + # assamble fake globals we need + globals = { + '__name__': filename, + '__file__': filename, + '__jinja_exception__': exc_info[:2], + + # we don't want to keep the reference to the template around + # to not cause circular dependencies, but we mark it as Jinja + # frame for the ProcessedTraceback + '__jinja_template__': None + } + + # and fake the exception + code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec') + + # if it's possible, change the name of the code. This won't work + # on some python environments such as google appengine + try: + if tb is None: + location = 'template' + else: + function = tb.tb_frame.f_code.co_name + if function == 'root': + location = 'top-level template code' + elif function.startswith('block_'): + location = 'block "%s"' % function[6:] + else: + location = 'template' + + if PY2: + code = CodeType(0, code.co_nlocals, code.co_stacksize, + code.co_flags, code.co_code, code.co_consts, + code.co_names, code.co_varnames, filename, + location, code.co_firstlineno, + code.co_lnotab, (), ()) + else: + code = CodeType(0, code.co_kwonlyargcount, + code.co_nlocals, code.co_stacksize, + code.co_flags, code.co_code, code.co_consts, + code.co_names, code.co_varnames, filename, + location, code.co_firstlineno, + code.co_lnotab, (), ()) + except Exception as e: + pass + + # execute the code and catch the new traceback + try: + exec(code, globals, locals) + except: + exc_info = sys.exc_info() + new_tb = exc_info[2].tb_next + + # return without this frame + return exc_info[:2] + (new_tb,) + + +def _init_ugly_crap(): + """This function implements a few ugly things so that we can patch the + traceback objects. 
The function returned allows resetting `tb_next` on + any python traceback object. Do not attempt to use this on non cpython + interpreters + """ + import ctypes + from types import TracebackType + + if PY2: + # figure out size of _Py_ssize_t for Python 2: + if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'): + _Py_ssize_t = ctypes.c_int64 + else: + _Py_ssize_t = ctypes.c_int + else: + # platform ssize_t on Python 3 + _Py_ssize_t = ctypes.c_ssize_t + + # regular python + class _PyObject(ctypes.Structure): + pass + _PyObject._fields_ = [ + ('ob_refcnt', _Py_ssize_t), + ('ob_type', ctypes.POINTER(_PyObject)) + ] + + # python with trace + if hasattr(sys, 'getobjects'): + class _PyObject(ctypes.Structure): + pass + _PyObject._fields_ = [ + ('_ob_next', ctypes.POINTER(_PyObject)), + ('_ob_prev', ctypes.POINTER(_PyObject)), + ('ob_refcnt', _Py_ssize_t), + ('ob_type', ctypes.POINTER(_PyObject)) + ] + + class _Traceback(_PyObject): + pass + _Traceback._fields_ = [ + ('tb_next', ctypes.POINTER(_Traceback)), + ('tb_frame', ctypes.POINTER(_PyObject)), + ('tb_lasti', ctypes.c_int), + ('tb_lineno', ctypes.c_int) + ] + + def tb_set_next(tb, next): + """Set the tb_next attribute of a traceback object.""" + if not (isinstance(tb, TracebackType) and + (next is None or isinstance(next, TracebackType))): + raise TypeError('tb_set_next arguments must be traceback objects') + obj = _Traceback.from_address(id(tb)) + if tb.tb_next is not None: + old = _Traceback.from_address(id(tb.tb_next)) + old.ob_refcnt -= 1 + if next is None: + obj.tb_next = ctypes.POINTER(_Traceback)() + else: + next = _Traceback.from_address(id(next)) + next.ob_refcnt += 1 + obj.tb_next = ctypes.pointer(next) + + return tb_set_next + + +# try to get a tb_set_next implementation if we don't have transparent +# proxies. +tb_set_next = None +if tproxy is None: + try: + tb_set_next = _init_ugly_crap() + except: + pass + del _init_ugly_crap diff --git a/py3/lib/python3.6/site-packages/jinja2/defaults.py b/py3/lib/python3.6/site-packages/jinja2/defaults.py new file mode 100644 index 0000000..7c93dec --- /dev/null +++ b/py3/lib/python3.6/site-packages/jinja2/defaults.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +""" + jinja2.defaults + ~~~~~~~~~~~~~~~ + + Jinja default filters and tags. + + :copyright: (c) 2017 by the Jinja Team. + :license: BSD, see LICENSE for more details. 
+""" +from jinja2._compat import range_type +from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner, Namespace + + +# defaults for the parser / lexer +BLOCK_START_STRING = '{%' +BLOCK_END_STRING = '%}' +VARIABLE_START_STRING = '{{' +VARIABLE_END_STRING = '}}' +COMMENT_START_STRING = '{#' +COMMENT_END_STRING = '#}' +LINE_STATEMENT_PREFIX = None +LINE_COMMENT_PREFIX = None +TRIM_BLOCKS = False +LSTRIP_BLOCKS = False +NEWLINE_SEQUENCE = '\n' +KEEP_TRAILING_NEWLINE = False + + +# default filters, tests and namespace +from jinja2.filters import FILTERS as DEFAULT_FILTERS +from jinja2.tests import TESTS as DEFAULT_TESTS +DEFAULT_NAMESPACE = { + 'range': range_type, + 'dict': dict, + 'lipsum': generate_lorem_ipsum, + 'cycler': Cycler, + 'joiner': Joiner, + 'namespace': Namespace +} + + +# default policies +DEFAULT_POLICIES = { + 'compiler.ascii_str': True, + 'urlize.rel': 'noopener', + 'urlize.target': None, + 'truncate.leeway': 5, + 'json.dumps_function': None, + 'json.dumps_kwargs': {'sort_keys': True}, + 'ext.i18n.trimmed': False, +} + + +# export all constants +__all__ = tuple(x for x in locals().keys() if x.isupper()) diff --git a/py3/lib/python3.6/site-packages/jinja2/environment.py b/py3/lib/python3.6/site-packages/jinja2/environment.py new file mode 100644 index 0000000..549d9af --- /dev/null +++ b/py3/lib/python3.6/site-packages/jinja2/environment.py @@ -0,0 +1,1276 @@ +# -*- coding: utf-8 -*- +""" + jinja2.environment + ~~~~~~~~~~~~~~~~~~ + + Provides a class that holds runtime and parsing time options. + + :copyright: (c) 2017 by the Jinja Team. + :license: BSD, see LICENSE for more details. +""" +import os +import sys +import weakref +from functools import reduce, partial +from jinja2 import nodes +from jinja2.defaults import BLOCK_START_STRING, \ + BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \ + COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \ + LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \ + DEFAULT_FILTERS, DEFAULT_TESTS, DEFAULT_NAMESPACE, \ + DEFAULT_POLICIES, KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS +from jinja2.lexer import get_lexer, TokenStream +from jinja2.parser import Parser +from jinja2.nodes import EvalContext +from jinja2.compiler import generate, CodeGenerator +from jinja2.runtime import Undefined, new_context, Context +from jinja2.exceptions import TemplateSyntaxError, TemplateNotFound, \ + TemplatesNotFound, TemplateRuntimeError +from jinja2.utils import import_string, LRUCache, Markup, missing, \ + concat, consume, internalcode, have_async_gen +from jinja2._compat import imap, ifilter, string_types, iteritems, \ + text_type, reraise, implements_iterator, implements_to_string, \ + encode_filename, PY2, PYPY + + +# for direct template usage we have up to ten living environments +_spontaneous_environments = LRUCache(10) + +# the function to create jinja traceback objects. This is dynamically +# imported on the first exception in the exception handler. +_make_traceback = None + + +def get_spontaneous_environment(*args): + """Return a new spontaneous environment. A spontaneous environment is an + unnamed and unaccessible (in theory) environment that is used for + templates generated from a string and not from the file system. 
+ """ + try: + env = _spontaneous_environments.get(args) + except TypeError: + return Environment(*args) + if env is not None: + return env + _spontaneous_environments[args] = env = Environment(*args) + env.shared = True + return env + + +def create_cache(size): + """Return the cache class for the given size.""" + if size == 0: + return None + if size < 0: + return {} + return LRUCache(size) + + +def copy_cache(cache): + """Create an empty copy of the given cache.""" + if cache is None: + return None + elif type(cache) is dict: + return {} + return LRUCache(cache.capacity) + + +def load_extensions(environment, extensions): + """Load the extensions from the list and bind it to the environment. + Returns a dict of instantiated environments. + """ + result = {} + for extension in extensions: + if isinstance(extension, string_types): + extension = import_string(extension) + result[extension.identifier] = extension(environment) + return result + + +def fail_for_missing_callable(string, name): + msg = string % name + if isinstance(name, Undefined): + try: + name._fail_with_undefined_error() + except Exception as e: + msg = '%s (%s; did you forget to quote the callable name?)' % (msg, e) + raise TemplateRuntimeError(msg) + + +def _environment_sanity_check(environment): + """Perform a sanity check on the environment.""" + assert issubclass(environment.undefined, Undefined), 'undefined must ' \ + 'be a subclass of undefined because filters depend on it.' + assert environment.block_start_string != \ + environment.variable_start_string != \ + environment.comment_start_string, 'block, variable and comment ' \ + 'start strings must be different' + assert environment.newline_sequence in ('\r', '\r\n', '\n'), \ + 'newline_sequence set to unknown line ending string.' + return environment + + +class Environment(object): + r"""The core component of Jinja is the `Environment`. It contains + important shared variables like configuration, filters, tests, + globals and others. Instances of this class may be modified if + they are not shared and if no template was loaded so far. + Modifications on environments after the first template was loaded + will lead to surprising effects and undefined behavior. + + Here are the possible initialization parameters: + + `block_start_string` + The string marking the beginning of a block. Defaults to ``'{%'``. + + `block_end_string` + The string marking the end of a block. Defaults to ``'%}'``. + + `variable_start_string` + The string marking the beginning of a print statement. + Defaults to ``'{{'``. + + `variable_end_string` + The string marking the end of a print statement. Defaults to + ``'}}'``. + + `comment_start_string` + The string marking the beginning of a comment. Defaults to ``'{#'``. + + `comment_end_string` + The string marking the end of a comment. Defaults to ``'#}'``. + + `line_statement_prefix` + If given and a string, this will be used as prefix for line based + statements. See also :ref:`line-statements`. + + `line_comment_prefix` + If given and a string, this will be used as prefix for line based + comments. See also :ref:`line-statements`. + + .. versionadded:: 2.2 + + `trim_blocks` + If this is set to ``True`` the first newline after a block is + removed (block, not variable tag!). Defaults to `False`. + + `lstrip_blocks` + If this is set to ``True`` leading spaces and tabs are stripped + from the start of a line to a block. Defaults to `False`. + + `newline_sequence` + The sequence that starts a newline. 
Must be one of ``'\r'``, + ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a + useful default for Linux and OS X systems as well as web + applications. + + `keep_trailing_newline` + Preserve the trailing newline when rendering templates. + The default is ``False``, which causes a single newline, + if present, to be stripped from the end of the template. + + .. versionadded:: 2.7 + + `extensions` + List of Jinja extensions to use. This can either be import paths + as strings or extension classes. For more information have a + look at :ref:`the extensions documentation `. + + `optimized` + should the optimizer be enabled? Default is ``True``. + + `undefined` + :class:`Undefined` or a subclass of it that is used to represent + undefined values in the template. + + `finalize` + A callable that can be used to process the result of a variable + expression before it is output. For example one can convert + ``None`` implicitly into an empty string here. + + `autoescape` + If set to ``True`` the XML/HTML autoescaping feature is enabled by + default. For more details about autoescaping see + :class:`~jinja2.utils.Markup`. As of Jinja 2.4 this can also + be a callable that is passed the template name and has to + return ``True`` or ``False`` depending on autoescape should be + enabled by default. + + .. versionchanged:: 2.4 + `autoescape` can now be a function + + `loader` + The template loader for this environment. + + `cache_size` + The size of the cache. Per default this is ``400`` which means + that if more than 400 templates are loaded the loader will clean + out the least recently used template. If the cache size is set to + ``0`` templates are recompiled all the time, if the cache size is + ``-1`` the cache will not be cleaned. + + .. versionchanged:: 2.8 + The cache size was increased to 400 from a low 50. + + `auto_reload` + Some loaders load templates from locations where the template + sources may change (ie: file system or database). If + ``auto_reload`` is set to ``True`` (default) every time a template is + requested the loader checks if the source changed and if yes, it + will reload the template. For higher performance it's possible to + disable that. + + `bytecode_cache` + If set to a bytecode cache object, this object will provide a + cache for the internal Jinja bytecode so that templates don't + have to be parsed if they were not changed. + + See :ref:`bytecode-cache` for more information. + + `enable_async` + If set to true this enables async template execution which allows + you to take advantage of newer Python features. This requires + Python 3.6 or later. + """ + + #: if this environment is sandboxed. Modifying this variable won't make + #: the environment sandboxed though. For a real sandboxed environment + #: have a look at jinja2.sandbox. This flag alone controls the code + #: generation by the compiler. + sandboxed = False + + #: True if the environment is just an overlay + overlayed = False + + #: the environment this environment is linked to if it is an overlay + linked_to = None + + #: shared environments have this set to `True`. A shared environment + #: must not be modified + shared = False + + #: these are currently EXPERIMENTAL undocumented features. + exception_handler = None + exception_formatter = None + + #: the class that is used for code generation. See + #: :class:`~jinja2.compiler.CodeGenerator` for more information. + code_generator_class = CodeGenerator + + #: the context class thatis used for templates. 
See + #: :class:`~jinja2.runtime.Context` for more information. + context_class = Context + + def __init__(self, + block_start_string=BLOCK_START_STRING, + block_end_string=BLOCK_END_STRING, + variable_start_string=VARIABLE_START_STRING, + variable_end_string=VARIABLE_END_STRING, + comment_start_string=COMMENT_START_STRING, + comment_end_string=COMMENT_END_STRING, + line_statement_prefix=LINE_STATEMENT_PREFIX, + line_comment_prefix=LINE_COMMENT_PREFIX, + trim_blocks=TRIM_BLOCKS, + lstrip_blocks=LSTRIP_BLOCKS, + newline_sequence=NEWLINE_SEQUENCE, + keep_trailing_newline=KEEP_TRAILING_NEWLINE, + extensions=(), + optimized=True, + undefined=Undefined, + finalize=None, + autoescape=False, + loader=None, + cache_size=400, + auto_reload=True, + bytecode_cache=None, + enable_async=False): + # !!Important notice!! + # The constructor accepts quite a few arguments that should be + # passed by keyword rather than position. However it's important to + # not change the order of arguments because it's used at least + # internally in those cases: + # - spontaneous environments (i18n extension and Template) + # - unittests + # If parameter changes are required only add parameters at the end + # and don't change the arguments (or the defaults!) of the arguments + # existing already. + + # lexer / parser information + self.block_start_string = block_start_string + self.block_end_string = block_end_string + self.variable_start_string = variable_start_string + self.variable_end_string = variable_end_string + self.comment_start_string = comment_start_string + self.comment_end_string = comment_end_string + self.line_statement_prefix = line_statement_prefix + self.line_comment_prefix = line_comment_prefix + self.trim_blocks = trim_blocks + self.lstrip_blocks = lstrip_blocks + self.newline_sequence = newline_sequence + self.keep_trailing_newline = keep_trailing_newline + + # runtime information + self.undefined = undefined + self.optimized = optimized + self.finalize = finalize + self.autoescape = autoescape + + # defaults + self.filters = DEFAULT_FILTERS.copy() + self.tests = DEFAULT_TESTS.copy() + self.globals = DEFAULT_NAMESPACE.copy() + + # set the loader provided + self.loader = loader + self.cache = create_cache(cache_size) + self.bytecode_cache = bytecode_cache + self.auto_reload = auto_reload + + # configurable policies + self.policies = DEFAULT_POLICIES.copy() + + # load extensions + self.extensions = load_extensions(self, extensions) + + self.enable_async = enable_async + self.is_async = self.enable_async and have_async_gen + + _environment_sanity_check(self) + + def add_extension(self, extension): + """Adds an extension after the environment was created. + + .. versionadded:: 2.5 + """ + self.extensions.update(load_extensions(self, [extension])) + + def extend(self, **attributes): + """Add the items to the instance of the environment if they do not exist + yet. This is used by :ref:`extensions ` to register + callbacks and configuration values without breaking inheritance. 
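A minimal sketch of how an extension would typically use this hook; the attribute names below are illustrative only, not part of the diffed file:

from jinja2 import Environment

env = Environment()
# Register defaults only if nothing has set them yet; calling extend() again
# with the same keys is a no-op, so values set earlier are never clobbered.
env.extend(fragment_cache_prefix='', fragment_cache=None)
env.extend(fragment_cache_prefix='other')  # ignored: attribute already exists
assert env.fragment_cache_prefix == ''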
+ """ + for key, value in iteritems(attributes): + if not hasattr(self, key): + setattr(self, key, value) + + def overlay(self, block_start_string=missing, block_end_string=missing, + variable_start_string=missing, variable_end_string=missing, + comment_start_string=missing, comment_end_string=missing, + line_statement_prefix=missing, line_comment_prefix=missing, + trim_blocks=missing, lstrip_blocks=missing, + extensions=missing, optimized=missing, + undefined=missing, finalize=missing, autoescape=missing, + loader=missing, cache_size=missing, auto_reload=missing, + bytecode_cache=missing): + """Create a new overlay environment that shares all the data with the + current environment except for cache and the overridden attributes. + Extensions cannot be removed for an overlayed environment. An overlayed + environment automatically gets all the extensions of the environment it + is linked to plus optional extra extensions. + + Creating overlays should happen after the initial environment was set + up completely. Not all attributes are truly linked, some are just + copied over so modifications on the original environment may not shine + through. + """ + args = dict(locals()) + del args['self'], args['cache_size'], args['extensions'] + + rv = object.__new__(self.__class__) + rv.__dict__.update(self.__dict__) + rv.overlayed = True + rv.linked_to = self + + for key, value in iteritems(args): + if value is not missing: + setattr(rv, key, value) + + if cache_size is not missing: + rv.cache = create_cache(cache_size) + else: + rv.cache = copy_cache(self.cache) + + rv.extensions = {} + for key, value in iteritems(self.extensions): + rv.extensions[key] = value.bind(rv) + if extensions is not missing: + rv.extensions.update(load_extensions(rv, extensions)) + + return _environment_sanity_check(rv) + + lexer = property(get_lexer, doc="The lexer for this environment.") + + def iter_extensions(self): + """Iterates over the extensions by priority.""" + return iter(sorted(self.extensions.values(), + key=lambda x: x.priority)) + + def getitem(self, obj, argument): + """Get an item or attribute of an object but prefer the item.""" + try: + return obj[argument] + except (AttributeError, TypeError, LookupError): + if isinstance(argument, string_types): + try: + attr = str(argument) + except Exception: + pass + else: + try: + return getattr(obj, attr) + except AttributeError: + pass + return self.undefined(obj=obj, name=argument) + + def getattr(self, obj, attribute): + """Get an item or attribute of an object but prefer the attribute. + Unlike :meth:`getitem` the attribute *must* be a bytestring. + """ + try: + return getattr(obj, attribute) + except AttributeError: + pass + try: + return obj[attribute] + except (TypeError, LookupError, AttributeError): + return self.undefined(obj=obj, name=attribute) + + def call_filter(self, name, value, args=None, kwargs=None, + context=None, eval_ctx=None): + """Invokes a filter on a value the same way the compiler does it. + + Note that on Python 3 this might return a coroutine in case the + filter is running from an environment in async mode and the filter + supports async execution. It's your responsibility to await this + if needed. + + .. 
versionadded:: 2.7 + """ + func = self.filters.get(name) + if func is None: + fail_for_missing_callable('no filter named %r', name) + args = [value] + list(args or ()) + if getattr(func, 'contextfilter', False): + if context is None: + raise TemplateRuntimeError('Attempted to invoke context ' + 'filter without context') + args.insert(0, context) + elif getattr(func, 'evalcontextfilter', False): + if eval_ctx is None: + if context is not None: + eval_ctx = context.eval_ctx + else: + eval_ctx = EvalContext(self) + args.insert(0, eval_ctx) + elif getattr(func, 'environmentfilter', False): + args.insert(0, self) + return func(*args, **(kwargs or {})) + + def call_test(self, name, value, args=None, kwargs=None): + """Invokes a test on a value the same way the compiler does it. + + .. versionadded:: 2.7 + """ + func = self.tests.get(name) + if func is None: + fail_for_missing_callable('no test named %r', name) + return func(value, *(args or ()), **(kwargs or {})) + + @internalcode + def parse(self, source, name=None, filename=None): + """Parse the sourcecode and return the abstract syntax tree. This + tree of nodes is used by the compiler to convert the template into + executable source- or bytecode. This is useful for debugging or to + extract information from templates. + + If you are :ref:`developing Jinja2 extensions ` + this gives you a good overview of the node tree generated. + """ + try: + return self._parse(source, name, filename) + except TemplateSyntaxError: + exc_info = sys.exc_info() + self.handle_exception(exc_info, source_hint=source) + + def _parse(self, source, name, filename): + """Internal parsing function used by `parse` and `compile`.""" + return Parser(self, source, name, encode_filename(filename)).parse() + + def lex(self, source, name=None, filename=None): + """Lex the given sourcecode and return a generator that yields + tokens as tuples in the form ``(lineno, token_type, value)``. + This can be useful for :ref:`extension development ` + and debugging templates. + + This does not perform preprocessing. If you want the preprocessing + of the extensions to be applied you have to filter source through + the :meth:`preprocess` method. + """ + source = text_type(source) + try: + return self.lexer.tokeniter(source, name, filename) + except TemplateSyntaxError: + exc_info = sys.exc_info() + self.handle_exception(exc_info, source_hint=source) + + def preprocess(self, source, name=None, filename=None): + """Preprocesses the source with all extensions. This is automatically + called for all parsing and compiling methods but *not* for :meth:`lex` + because there you usually only want the actual source tokenized. + """ + return reduce(lambda s, e: e.preprocess(s, name, filename), + self.iter_extensions(), text_type(source)) + + def _tokenize(self, source, name, filename=None, state=None): + """Called by the parser to do the preprocessing and filtering + for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. + """ + source = self.preprocess(source, name, filename) + stream = self.lexer.tokenize(source, name, filename, state) + for ext in self.iter_extensions(): + stream = ext.filter_stream(stream) + if not isinstance(stream, TokenStream): + stream = TokenStream(stream, name, filename) + return stream + + def _generate(self, source, name, filename, defer_init=False): + """Internal hook that can be overridden to hook a different generate + method in. + + .. 
versionadded:: 2.5 + """ + return generate(source, self, name, filename, defer_init=defer_init, + optimized=self.optimized) + + def _compile(self, source, filename): + """Internal hook that can be overridden to hook a different compile + method in. + + .. versionadded:: 2.5 + """ + return compile(source, filename, 'exec') + + @internalcode + def compile(self, source, name=None, filename=None, raw=False, + defer_init=False): + """Compile a node or template source code. The `name` parameter is + the load name of the template after it was joined using + :meth:`join_path` if necessary, not the filename on the file system. + the `filename` parameter is the estimated filename of the template on + the file system. If the template came from a database or memory this + can be omitted. + + The return value of this method is a python code object. If the `raw` + parameter is `True` the return value will be a string with python + code equivalent to the bytecode returned otherwise. This method is + mainly used internally. + + `defer_init` is use internally to aid the module code generator. This + causes the generated code to be able to import without the global + environment variable to be set. + + .. versionadded:: 2.4 + `defer_init` parameter added. + """ + source_hint = None + try: + if isinstance(source, string_types): + source_hint = source + source = self._parse(source, name, filename) + source = self._generate(source, name, filename, + defer_init=defer_init) + if raw: + return source + if filename is None: + filename = '